From 1160eb4421d5ea51b8bd38f4c41cd2d39efec3ae Mon Sep 17 00:00:00 2001 From: Zark Date: Thu, 1 Jun 2006 19:08:57 +0000 Subject: [PATCH 001/268] initial import from CVS export git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@1 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- AUTHORS | 1 + SConstruct | 137 +++ doc/doxyfile | 232 +++++ doc/footer.html | 23 + doc/header.html | 23 + doc/jsoncpp.dox | 82 ++ doc/readme.txt | 1 + include/json/autolink.h | 105 +++ include/json/json.h | 9 + include/json/json_autolink.h | 19 + include/json/json_config.h | 28 + include/json/json_forwards.h | 18 + include/json/json_reader.h | 139 +++ include/json/json_util.h | 12 + include/json/json_value.h | 340 +++++++ include/json/json_writer.h | 93 ++ makefiles/vs71/jsoncpp.sln | 30 + src/jsontest/jsontest.vcproj | 119 +++ src/jsontest/main.cpp | 180 ++++ src/jsontest/sconscript | 6 + src/lib_json/json_reader.cpp | 701 ++++++++++++++ src/lib_json/json_value.cpp | 1195 ++++++++++++++++++++++++ src/lib_json/json_writer.cpp | 410 ++++++++ src/lib_json/lib_json.vcproj | 202 ++++ src/lib_json/sconscript | 8 + test/cleantests.py | 10 + test/generate_expected.py | 11 + test/jsontestrunner.py | 64 ++ test/runjsontests.py | 91 ++ test/test_array_01.expected | 1 + test/test_array_01.json | 1 + test/test_array_02.expected | 2 + test/test_array_02.json | 1 + test/test_array_03.expected | 6 + test/test_array_03.json | 1 + test/test_array_04.expected | 5 + test/test_array_04.json | 1 + test/test_array_05.expected | 100 ++ test/test_array_05.json | 1 + test/test_array_06.expected | 5 + test/test_array_06.json | 4 + test/test_basic_01.expected | 1 + test/test_basic_01.json | 1 + test/test_basic_02.expected | 1 + test/test_basic_02.json | 1 + test/test_basic_03.expected | 3 + test/test_basic_03.json | 3 + test/test_basic_04.expected | 2 + test/test_basic_04.json | 2 + test/test_basic_05.expected | 2 + test/test_basic_05.json | 2 + test/test_basic_06.expected | 2 + test/test_basic_06.json | 2 + test/test_basic_07.expected | 2 + test/test_basic_07.json | 2 + test/test_basic_08.expected | 2 + test/test_basic_08.json | 3 + test/test_basic_09.expected | 2 + test/test_basic_09.json | 4 + test/test_complex_01.expected | 20 + test/test_complex_01.json | 17 + test/test_integer_01.expected | 1 + test/test_integer_01.json | 2 + test/test_integer_02.expected | 1 + test/test_integer_02.json | 2 + test/test_integer_03.expected | 1 + test/test_integer_03.json | 2 + test/test_integer_04.expected | 2 + test/test_integer_04.json | 3 + test/test_integer_05.expected | 2 + test/test_integer_05.json | 2 + test/test_object_01.expected | 1 + test/test_object_01.json | 1 + test/test_object_02.expected | 2 + test/test_object_02.json | 1 + test/test_object_03.expected | 4 + test/test_object_03.json | 5 + test/test_preserve_comment_01.expected | 3 + test/test_preserve_comment_01.json | 14 + test/test_real_01.expected | 2 + test/test_real_01.json | 3 + test/test_real_02.expected | 2 + test/test_real_02.json | 3 + test/test_real_03.expected | 2 + test/test_real_03.json | 3 + test/test_real_04.expected | 2 + test/test_real_04.json | 3 + test/test_real_05.expected | 3 + test/test_real_05.json | 3 + test/test_real_06.expected | 3 + test/test_real_06.json | 3 + test/test_real_07.expected | 3 + test/test_real_07.json | 3 + 93 files changed, 4578 insertions(+) create mode 100644 AUTHORS create mode 100644 SConstruct create mode 100644 doc/doxyfile create mode 100644 doc/footer.html create mode 100644 doc/header.html create mode 100644 doc/jsoncpp.dox create mode 100644 
doc/readme.txt create mode 100644 include/json/autolink.h create mode 100644 include/json/json.h create mode 100644 include/json/json_autolink.h create mode 100644 include/json/json_config.h create mode 100644 include/json/json_forwards.h create mode 100644 include/json/json_reader.h create mode 100644 include/json/json_util.h create mode 100644 include/json/json_value.h create mode 100644 include/json/json_writer.h create mode 100644 makefiles/vs71/jsoncpp.sln create mode 100644 src/jsontest/jsontest.vcproj create mode 100644 src/jsontest/main.cpp create mode 100644 src/jsontest/sconscript create mode 100644 src/lib_json/json_reader.cpp create mode 100644 src/lib_json/json_value.cpp create mode 100644 src/lib_json/json_writer.cpp create mode 100644 src/lib_json/lib_json.vcproj create mode 100644 src/lib_json/sconscript create mode 100644 test/cleantests.py create mode 100644 test/generate_expected.py create mode 100644 test/jsontestrunner.py create mode 100644 test/runjsontests.py create mode 100644 test/test_array_01.expected create mode 100644 test/test_array_01.json create mode 100644 test/test_array_02.expected create mode 100644 test/test_array_02.json create mode 100644 test/test_array_03.expected create mode 100644 test/test_array_03.json create mode 100644 test/test_array_04.expected create mode 100644 test/test_array_04.json create mode 100644 test/test_array_05.expected create mode 100644 test/test_array_05.json create mode 100644 test/test_array_06.expected create mode 100644 test/test_array_06.json create mode 100644 test/test_basic_01.expected create mode 100644 test/test_basic_01.json create mode 100644 test/test_basic_02.expected create mode 100644 test/test_basic_02.json create mode 100644 test/test_basic_03.expected create mode 100644 test/test_basic_03.json create mode 100644 test/test_basic_04.expected create mode 100644 test/test_basic_04.json create mode 100644 test/test_basic_05.expected create mode 100644 test/test_basic_05.json create mode 100644 test/test_basic_06.expected create mode 100644 test/test_basic_06.json create mode 100644 test/test_basic_07.expected create mode 100644 test/test_basic_07.json create mode 100644 test/test_basic_08.expected create mode 100644 test/test_basic_08.json create mode 100644 test/test_basic_09.expected create mode 100644 test/test_basic_09.json create mode 100644 test/test_complex_01.expected create mode 100644 test/test_complex_01.json create mode 100644 test/test_integer_01.expected create mode 100644 test/test_integer_01.json create mode 100644 test/test_integer_02.expected create mode 100644 test/test_integer_02.json create mode 100644 test/test_integer_03.expected create mode 100644 test/test_integer_03.json create mode 100644 test/test_integer_04.expected create mode 100644 test/test_integer_04.json create mode 100644 test/test_integer_05.expected create mode 100644 test/test_integer_05.json create mode 100644 test/test_object_01.expected create mode 100644 test/test_object_01.json create mode 100644 test/test_object_02.expected create mode 100644 test/test_object_02.json create mode 100644 test/test_object_03.expected create mode 100644 test/test_object_03.json create mode 100644 test/test_preserve_comment_01.expected create mode 100644 test/test_preserve_comment_01.json create mode 100644 test/test_real_01.expected create mode 100644 test/test_real_01.json create mode 100644 test/test_real_02.expected create mode 100644 test/test_real_02.json create mode 100644 test/test_real_03.expected create mode 100644 
test/test_real_03.json create mode 100644 test/test_real_04.expected create mode 100644 test/test_real_04.json create mode 100644 test/test_real_05.expected create mode 100644 test/test_real_05.json create mode 100644 test/test_real_06.expected create mode 100644 test/test_real_06.json create mode 100644 test/test_real_07.expected create mode 100644 test/test_real_07.json diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 0000000..333e120 --- /dev/null +++ b/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/SConstruct b/SConstruct new file mode 100644 index 0000000..8e00aba --- /dev/null +++ b/SConstruct @@ -0,0 +1,137 @@ +import os +import os.path +import sys + +options = Options() +options.Add( EnumOption('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +env = Environment( ENV = {'PATH' : os.environ['PATH']}, + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( LIBS = ['pthreads'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform == 'linux-gcc': + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'] ) +else: + print "UNSUPPORTED PLATFORM." 
+ env.Exit(1) + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention + +env_testing = env.Copy( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Copy() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + +Export( 'env env_testing buildJSONExample buildLibary buildJSONTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source), jsontest_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +env.Alias( 'check' ) + +buildProjectInDirectory( 'src/jsontest' ) +buildProjectInDirectory( 'src/lib_json' ) diff --git a/doc/doxyfile b/doc/doxyfile new file mode 100644 index 0000000..e3a4085 --- /dev/null +++ b/doc/doxyfile @@ -0,0 +1,232 @@ +# Doxyfile 1.4.3 + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- +PROJECT_NAME = "JsonCpp" +PROJECT_NUMBER = 0.0 +OUTPUT_DIRECTORY = docbuild +CREATE_SUBDIRS = NO +OUTPUT_LANGUAGE = English +USE_WINDOWS_ENCODING = NO +BRIEF_MEMBER_DESC = YES +REPEAT_BRIEF = YES +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the +ALWAYS_DETAILED_SEC = NO +INLINE_INHERITED_MEMB = NO +FULL_PATH_NAMES = YES +STRIP_FROM_PATH = E:\prg\vc\Lib\jsoncpp +STRIP_FROM_INC_PATH = +SHORT_NAMES = NO +JAVADOC_AUTOBRIEF = NO +MULTILINE_CPP_IS_BRIEF = NO +DETAILS_AT_TOP = NO +INHERIT_DOCS = YES +DISTRIBUTE_GROUP_DOC = NO +SEPARATE_MEMBER_PAGES = NO +TAB_SIZE = 8 +ALIASES = +OPTIMIZE_OUTPUT_FOR_C = NO +OPTIMIZE_OUTPUT_JAVA = NO +SUBGROUPING = YES +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- +EXTRACT_ALL = YES +EXTRACT_PRIVATE = NO 
+EXTRACT_STATIC = NO +EXTRACT_LOCAL_CLASSES = NO +EXTRACT_LOCAL_METHODS = NO +HIDE_UNDOC_MEMBERS = NO +HIDE_UNDOC_CLASSES = NO +HIDE_FRIEND_COMPOUNDS = NO +HIDE_IN_BODY_DOCS = NO +INTERNAL_DOCS = NO +CASE_SENSE_NAMES = NO +HIDE_SCOPE_NAMES = NO +SHOW_INCLUDE_FILES = YES +INLINE_INFO = YES +SORT_MEMBER_DOCS = YES +SORT_BRIEF_DOCS = NO +SORT_BY_SCOPE_NAME = NO +GENERATE_TODOLIST = YES +GENERATE_TESTLIST = YES +GENERATE_BUGLIST = YES +GENERATE_DEPRECATEDLIST= YES +ENABLED_SECTIONS = +MAX_INITIALIZER_LINES = 30 +SHOW_USED_FILES = YES +SHOW_DIRECTORIES = YES +FILE_VERSION_FILTER = +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- +QUIET = NO +WARNINGS = YES +WARN_IF_UNDOCUMENTED = YES +WARN_IF_DOC_ERROR = YES +WARN_NO_PARAMDOC = NO +WARN_FORMAT = "$file:$line: $text" +WARN_LOGFILE = jsoncpp-doxygen-warning.log +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- +INPUT = ../include/json ../src/lib_json . +FILE_PATTERNS = *.h *.cpp *.dox +RECURSIVE = NO +EXCLUDE = +EXCLUDE_SYMLINKS = NO +EXCLUDE_PATTERNS = +EXAMPLE_PATH = +EXAMPLE_PATTERNS = * +EXAMPLE_RECURSIVE = NO +IMAGE_PATH = +INPUT_FILTER = +FILTER_PATTERNS = +FILTER_SOURCE_FILES = NO +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- +SOURCE_BROWSER = YES +INLINE_SOURCES = NO +STRIP_CODE_COMMENTS = YES +REFERENCED_BY_RELATION = YES +REFERENCES_RELATION = YES +USE_HTAGS = NO +VERBATIM_HEADERS = YES +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- +ALPHABETICAL_INDEX = NO +COLS_IN_ALPHA_INDEX = 5 +IGNORE_PREFIX = +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- +GENERATE_HTML = YES +HTML_OUTPUT = html +HTML_FILE_EXTENSION = .html +HTML_HEADER = header.html +HTML_FOOTER = footer.html +HTML_STYLESHEET = +HTML_ALIGN_MEMBERS = YES +GENERATE_HTMLHELP = NO +CHM_FILE = jsoncpp.chm +HHC_LOCATION = +GENERATE_CHI = NO +BINARY_TOC = NO +TOC_EXPAND = NO +DISABLE_INDEX = NO +ENUM_VALUES_PER_LINE = 4 +GENERATE_TREEVIEW = NO +TREEVIEW_WIDTH = 250 +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- +GENERATE_LATEX = NO +LATEX_OUTPUT = latex +LATEX_CMD_NAME = latex +MAKEINDEX_CMD_NAME = makeindex +COMPACT_LATEX = NO +PAPER_TYPE = a4wide +EXTRA_PACKAGES = +LATEX_HEADER = +PDF_HYPERLINKS = NO +USE_PDFLATEX = NO +LATEX_BATCHMODE = NO +LATEX_HIDE_INDICES = NO +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- +GENERATE_RTF = NO +RTF_OUTPUT = rtf +COMPACT_RTF = NO +RTF_HYPERLINKS = NO +RTF_STYLESHEET_FILE = +RTF_EXTENSIONS_FILE = 
+#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- +GENERATE_MAN = NO +MAN_OUTPUT = man +MAN_EXTENSION = .3 +MAN_LINKS = NO +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- +GENERATE_XML = NO +XML_OUTPUT = xml +XML_SCHEMA = +XML_DTD = +XML_PROGRAMLISTING = YES +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- +GENERATE_AUTOGEN_DEF = NO +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- +GENERATE_PERLMOD = NO +PERLMOD_LATEX = NO +PERLMOD_PRETTY = YES +PERLMOD_MAKEVAR_PREFIX = +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- +ENABLE_PREPROCESSING = YES +MACRO_EXPANSION = NO +EXPAND_ONLY_PREDEF = NO +SEARCH_INCLUDES = YES +INCLUDE_PATH = ../include +INCLUDE_FILE_PATTERNS = *.h +PREDEFINED = +EXPAND_AS_DEFINED = +SKIP_FUNCTION_MACROS = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- +TAGFILES = +GENERATE_TAGFILE = +ALLEXTERNALS = NO +EXTERNAL_GROUPS = YES +PERL_PATH = /usr/bin/perl +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- +CLASS_DIAGRAMS = NO +HIDE_UNDOC_RELATIONS = YES +HAVE_DOT = NO +CLASS_GRAPH = YES +COLLABORATION_GRAPH = YES +GROUP_GRAPHS = YES +UML_LOOK = NO +TEMPLATE_RELATIONS = NO +INCLUDE_GRAPH = YES +INCLUDED_BY_GRAPH = YES +CALL_GRAPH = NO +GRAPHICAL_HIERARCHY = YES +DIRECTORY_GRAPH = YES +DOT_IMAGE_FORMAT = png +DOT_PATH = +DOTFILE_DIRS = +MAX_DOT_GRAPH_WIDTH = 1024 +MAX_DOT_GRAPH_HEIGHT = 1024 +MAX_DOT_GRAPH_DEPTH = 1000 +DOT_TRANSPARENT = NO +DOT_MULTI_TARGETS = NO +GENERATE_LEGEND = YES +DOT_CLEANUP = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to the search engine +#--------------------------------------------------------------------------- +SEARCHENGINE = NO diff --git a/doc/footer.html b/doc/footer.html new file mode 100644 index 0000000..56df7a4 --- /dev/null +++ b/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/doc/header.html b/doc/header.html new file mode 100644 index 0000000..cb4c006 --- /dev/null +++ b/doc/header.html @@ -0,0 +1,23 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox new file mode 100644 index 0000000..70a71fe --- /dev/null +++ b/doc/jsoncpp.dox @@ -0,0 +1,82 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represents integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + indent : 3 +} +\endverbatim + +\section _features Features +- read and write JSON document +- rewrite JSON document preserving original comments + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormatedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndent( root.get("indent", 3).asInt() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitely construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"] = getCurrentIndent(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); +\endcode + + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +The json-cpp library and this documentation are in Public Domain. + +\author Baptiste Lepilleur +*/ diff --git a/doc/readme.txt b/doc/readme.txt new file mode 100644 index 0000000..499422e --- /dev/null +++ b/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/include/json/autolink.h b/include/json/autolink.h new file mode 100644 index 0000000..8051945 --- /dev/null +++ b/include/json/autolink.h @@ -0,0 +1,105 @@ +// No gards, this header can be included multiple time + +// Generic header to automatically link against a specified library +// The library name prefix must be defined in CPPTL_AUTOLINK_NAME. +// CPPTL_AUTOLINK_NAME will be undefined after including this header. + +// The full library name is build according to the following rules: +// (0) CPPTL_AUTOLINK_NAME: library name prefix (json,...) +// (a) TOOLSET: vc6, vc70, vc71, vc80, bcb4, bcb5, bcb6 +// (b) LINKAGE: lib(static), dll(dynamic) +// The macro CPPTL_AUTOLINK_DLL must be defined to indicate that we are linking +// against a DLL. 
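// A hypothetical illustration of the naming rules above (not taken from the
// header itself, and assuming the suffix conventions spelled out just below):
// building with Visual C++ 7.1 against the static library and the static
// multi-threaded debug CRT (/MTd), a translation unit that does
//
//   #define CPPTL_AUTOLINK_NAME "json"
//   #include <json/autolink.h>
//
// would compose the name "json_vc71_libmtd.lib", i.e. the header would emit
//
//   #pragma comment(lib, "json_vc71_libmtd.lib")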
+// (c) This suffix depends on threading mode and CRT linkage +// This suffix follow Microsoft Visual Studio c++ compiler command-line option +// used to select the CRT library (/mt, /mtd...) +// Threading / Run-time library / suffix +// single / static / ml +// mutli-thread / static / mt +// multi-thread / dynamic library / md +// (e) DEBUG MODE: nothing (release), d(debug) +// FULLNAME: 0_(a)_bcd.lib +// Example: +// Compiling library "cpptl" with vc 7.1 as a static library with debug dll CRT (/MDD) +// "cpptl_vc71_libmdd" +#if !defined(CPPTL_AUTOLINK_NAME) +# error Macro CPPTL_AUTOLINK_NAME should be defined. You should not include this header directly. +#endif + +#undef CPPTL_AUTOLINK_TOOLSET_ +#undef CPPTL_AUTOLINK_CRT_ +#undef CPPTL_AUTOLINK_LINKAGE_ +#undef CPPTL_AUTOLINK_DEBUG_MODE_ + +// Select compiler +// Visual Studio +#if defined(_MSC_VER) +# if defined(_WIN32_WCE) +# define CPPTL_AUTOLINK_TOOLSET_ "evc4" +# elif (_MSC_VER < 1300) //VC6 +# define CPPTL_AUTOLINK_TOOLSET_ "vc6" +# elif (_MSC_VER < 1310) //VC7.0 (.NET 2002) +# define CPPTL_AUTOLINK_TOOLSET_ "vc70" +# elif (_MSC_VER < 1400) //VC7.1 (.NET 2003) +# define CPPTL_AUTOLINK_TOOLSET_ "vc71" +# else +# define CPPTL_AUTOLINK_TOOLSET_ "vc80" +# endif +// Borland C++ +#elif defined(__BORLANDC__) +# if (__BORLANDC__ >= 0x560) // CBuilder 6 +# define CPPTL_AUTOLINK_TOOLSET_ "bcb6" +# elif (__BORLANDC__ >= 0x550) +# define CPPTL_AUTOLINK_TOOLSET_ "bcb5" +# elif (__BORLANDC__ >= 0x540) +# define CPPTL_AUTOLINK_TOOLSET_ "bcb4" +# endif +#endif + +// Select CRT library: threading & linkage +#if defined(_MT) || defined(__MT__) +# if defined(_DLL) +# define CPPTL_AUTOLINK_CRT_ "md" +# else +# define CPPTL_AUTOLINK_CRT_ "mt" +# endif +#else +# define CPPTL_AUTOLINK_CRT_ "ml" +#endif + +// Select debug mode +#if defined(_DEBUG) +# define CPPTL_AUTOLINK_DEBUG_MODE_ "d" +#else +# define CPPTL_AUTOLINK_DEBUG_MODE_ "" +#endif + +// Select linkage +#if defined(CPPTL_AUTOLINK_DLL) +# define CPPTL_AUTOLINK_LINKAGE_ "dll" +#else +# define CPPTL_AUTOLINK_LINKAGE_ "lib" +#endif + +// Automatic link +#if defined(CPPTL_AUTOLINK_TOOLSET_) && \ + defined(CPPTL_AUTOLINK_CRT_) && \ + defined(CPPTL_AUTOLINK_LINKAGE_) && \ + defined(CPPTL_AUTOLINK_DEBUG_MODE_) +# define CPPTL_AUTOLINK_FULL_NAME \ + CPPTL_AUTOLINK_NAME "_" CPPTL_AUTOLINK_TOOLSET_ "_" CPPTL_AUTOLINK_LINKAGE_ \ + CPPTL_AUTOLINK_CRT_ CPPTL_AUTOLINK_DEBUG_MODE_ ".lib" +# pragma comment(lib,CPPTL_AUTOLINK_FULL_NAME) + +# if defined(CPPTL_AUTOLINK_VERBOSE) && defined(_MSC_VER) +# pragma message( "Linking with" CPPTL_AUTOLINK_FULL_NAME ) +# endif + +#endif + +#undef CPPTL_AUTOLINK_TOOLSET_ +#undef CPPTL_AUTOLINK_CRT_ +#undef CPPTL_AUTOLINK_LINKAGE_ +#undef CPPTL_AUTOLINK_DEBUG_MODE_ +#undef CPPTL_AUTOLINK_FULL_NAME + diff --git a/include/json/json.h b/include/json/json.h new file mode 100644 index 0000000..e101c0d --- /dev/null +++ b/include/json/json.h @@ -0,0 +1,9 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "json_autolink.h" +# include "json_value.h" +# include "json_reader.h" +# include "json_writer.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/include/json/json_autolink.h b/include/json/json_autolink.h new file mode 100644 index 0000000..49d7534 --- /dev/null +++ b/include/json/json_autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "json_config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define 
CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/include/json/json_config.h b/include/json/json_config.h new file mode 100644 index 0000000..e4d72d9 --- /dev/null +++ b/include/json/json_config.h @@ -0,0 +1,28 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/include/json/json_forwards.h b/include/json/json_forwards.h new file mode 100644 index 0000000..3f44026 --- /dev/null +++ b/include/json/json_forwards.h @@ -0,0 +1,18 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "json_config.h" + +namespace Json { + + class FastWriter; + class Path; + class PathArgument; + class Reader; + class StyledWriter; + class Value; + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/include/json/json_reader.h b/include/json/json_reader.h new file mode 100644 index 0000000..d498ef8 --- /dev/null +++ b/include/json/json_reader.h @@ -0,0 +1,139 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "json_forwards.h" +# include "json_value.h" +# include +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Unserialize a JSON document into a Value. + * + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + Reader(); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
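 *
 * A minimal usage sketch (assuming a std::string named doc holding the JSON
 * text; the exact layout of the error text is whatever this method produces):
 *
 *   Json::Reader reader;
 *   Json::Value root;
 *   if ( !reader.parse( doc, root ) )
 *       std::cerr << reader.getFormatedErrorMessages();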
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + bool collectComments_; + }; + + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/include/json/json_util.h b/include/json/json_util.h new file mode 100644 index 0000000..3f51ade --- /dev/null +++ b/include/json/json_util.h @@ -0,0 +1,12 @@ +#ifndef JSON_UTIL_H_INCLUDED +# define JSON_UTIL_H_INCLUDED + +# include "json_config.h" +# include + +namespace Json { + + +} // namespace Json + +#endif // JSON_UTIL_H_INCLUDED diff --git a/include/json/json_value.h b/include/json/json_value.h new file mode 100644 index 0000000..1fb1f82 --- /dev/null +++ b/include/json/json_value.h @@ -0,0 +1,340 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "json_forwards.h" +# include +# include +# include + +# ifdef JSON_USE_CPPTL +# include +# endif + +namespace Json { + + class Value; + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). 
+ }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +# ifdef JSON_USE_CPPTL + typedef CppTL::AnyEnumerator EnumMemberNames; + typedef CppTL::AnyEnumerator EnumValues; +# endif + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. + */ + class JSON_API Value + { + public: + typedef std::vector Members; + typedef int Int; + typedef unsigned int UInt; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// Removes all object members and array elements. + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. 
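    /// A minimal sketch of that auto-resize behaviour (assuming an arrayValue
    /// named list; illustrative only):
    ///   Json::Value list( Json::arrayValue );
    ///   list[2u] = "third";           // elements 0 and 1 are created as null
    ///   assert( list.size() == 3 );   // size grew to index+1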
+ Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Returns true if index < size(). + bool isValidIndex( UInt index ) const; + /// Append value to array at the end. + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + // Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + // Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + // Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + // Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + // Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + // Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Returns the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Returns the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Returns the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// Returns true if the object has a member named key. + bool isMember( const char *key ) const; + /// Returns true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Returns true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + // Returns a list of the member names. 
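    // A minimal iteration sketch (assuming an objectValue named obj;
    // illustrative only):
    //   Json::Value::Members names = obj.getMemberNames();
    //   for ( Json::Value::Members::iterator it = names.begin();
    //         it != names.end(); ++it )
    //       std::cout << *it << " = " << obj[*it].toStyledString();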
+ Members getMemberNames() const; + +# ifdef JSON_USE_CPPTL + EnumMemberNames enumMemberNames() const; + EnumValues enumValues() const; +# endif + + void setComment( const char *comment, + CommentPlacement placement ); + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + struct CZString + { + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + typedef std::map ObjectValues; + + struct MemberNamesTransform + { + typedef const char *result_type; + const char *operator()( const CZString &name ) const + { + return name.c_str(); + } + }; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; + ObjectValues *map_; + } value_; + ValueType type_; + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( Value::UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + Value::UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
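    /// A minimal sketch of the path syntax documented above (the class itself
    /// is flagged experimental and untested; the names and the exact path
    /// string here are illustrative assumptions):
    ///   Json::Path path( ".plug-ins.[%]", Json::PathArgument( 0u ) );
    ///   const Json::Value &first = path.resolve( root );  // root["plug-ins"][0]
    ///   Json::Value &slot = path.make( root );            // creates the node if absent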
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/include/json/json_writer.h b/include/json/json_writer.h new file mode 100644 index 0000000..82b1a63 --- /dev/null +++ b/include/json/json_writer.h @@ -0,0 +1,93 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "json_value.h" +//# include "json_reader.h" +# include +# include + +namespace Json { + + class Value; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter + { + public: + std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter + { + public: + StyledWriter(); + + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represent the root value. 
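 *
 * A minimal sketch contrasting the two writers (assuming a populated Value
 * named root):
 *
 *   Json::FastWriter fast;
 *   std::string compact = fast.write( root );   // single line, bandwidth friendly
 *
 *   Json::StyledWriter styled;
 *   std::string pretty = styled.write( root );  // indented, comments re-emitted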
+ */ + std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Value::Int value ); + std::string JSON_API valueToString( Value::UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/makefiles/vs71/jsoncpp.sln b/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..fc18a80 --- /dev/null +++ b/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,30 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "..\..\src\lib_json\lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "..\..\src\jsontest\jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/src/jsontest/jsontest.vcproj b/src/jsontest/jsontest.vcproj new file mode 100644 index 0000000..f86b27e --- /dev/null +++ b/src/jsontest/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/jsontest/main.cpp b/src/jsontest/main.cpp new file mode 100644 index 0000000..c0d54b3 --- /dev/null +++ b/src/jsontest/main.cpp @@ -0,0 +1,180 @@ +#include +//#include +#include + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + int size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new 
char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root ) +{ + Json::Reader reader; + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ +// Json::FastWriter writer; + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +int main( int argc, const char *argv[] ) +{ + if ( argc != 2 ) + { + printf( "Usage: %s input-json-file", argv[0] ); + return 3; + } + + std::string input = readInputTestFile( argv[1] ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", argv[1] ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", argv[1] ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + int exitCode = parseAndSaveValueTree( input, actualPath, "input", root ); + if ( exitCode == 0 ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, "rewrite", rewriteRoot ); + } + } + + return exitCode; +} \ No newline at end of file diff --git a/src/jsontest/sconscript b/src/jsontest/sconscript new file mode 100644 index 0000000..bc46095 --- /dev/null +++ b/src/jsontest/sconscript @@ -0,0 +1,6 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontest' ) diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..1720e54 --- /dev/null +++ b/src/lib_json/json_reader.cpp @@ -0,0 +1,701 @@ +#include +#include +#include +#include +#include + +namespace Json { + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() +{ +} + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + collectComments_ = collectComments; + document_ = document; + begin_ = document_.c_str(); + end_ = begin_ + document_.length(); + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_.resize(0); + errors_.resize(0); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_.resize(0); + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); +} + 
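// A minimal round-trip sketch of the comment collection wired up above
// (assuming a std::string named doc whose JSON text carries // comments):
//
//   Json::Reader reader;
//   Json::Value root;
//   if ( reader.parse( doc, root, true ) )            // collectComments = true
//   {
//       Json::StyledWriter writer;
//       std::string rewritten = writer.write( root ); // comments are re-emitted
//   }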
+ +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' 
) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name.resize(0); + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + if ( !readToken( token ) + || ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ) ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + // @todo encode unicode as utf8. 
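+                  // One hedged sketch of that encoding (illustration only, not
+                  // what this patch implements): the BMP code point held in
+                  // 'unicode' could be appended to 'decoded' as 1-3 UTF-8 bytes:
+                  // if ( unicode < 0x80 )
+                  //    decoded += char( unicode );
+                  // else if ( unicode < 0x800 )
+                  // {
+                  //    decoded += char( 0xC0 | (unicode >> 6) );
+                  //    decoded += char( 0x80 | (unicode & 0x3F) );
+                  // }
+                  // else
+                  // {
+                  //    decoded += char( 0xE0 | (unicode >> 12) );
+                  //    decoded += char( 0x80 | ((unicode >> 6) & 0x3F) );
+                  //    decoded += char( 0x80 | (unicode & 0x3F) );
+                  // }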
+ } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >=0 && c <= 9 ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +} // namespace Json diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp new file mode 100644 index 0000000..820fe36 --- /dev/null +++ b/src/lib_json/json_value.cpp @@ -0,0 +1,1195 @@ +#include +#include +#include +#include "assert.h" +#ifdef JSON_USE_CPPTL +# include +# include +#endif + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw 
+ +namespace Json { + +const Value Value::null; +const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); +const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); +const Value::UInt Value::maxUInt = Value::UInt(-1); + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + free( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + free( comment_ ); + comment_ = text ? strdup( text ) : 0; +} + + + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? strdup(cstr) : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) + : cstr_( other.index_ != noDuplication && other.cstr_ != 0 ? strdup( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( index_ == duplicate ) + free( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + + + + +Value::Value( ValueType type ) + : type_( type ) + , comments_( 0 ) +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , comments_( 0 ) +{ + value_.string_ = value ? strdup( value ) : 0; +} + +Value::Value( const std::string &value ) + : type_( stringValue ) + , comments_( 0 ) +{ + value_.string_ = value.empty() ? 0 : strdup( value.c_str() ); + +} +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , comments_( 0 ) +{ + value_.string_ = value.empty() ? 
0 : strdup( value.c_str() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + value_.string_ = strdup( other.value_.string_ ); + else + value_.string_ = 0; + break; + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + // @todo for each, reset parent... + break; + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + free( value_.string_ ); + break; + case arrayValue: + case objectValue: + delete value_.map_; + break; + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + std::swap( type_, other.type_ ); + std::swap( value_, other.value_ ); +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + free( value_.string_ ); + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? 
true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + if ( type_ != other.type_ ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? "true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT( "Type is not convertible to double" && false ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT( value_.uint_ < maxInt && "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT( value_.real_ >= minInt && value_.real_ <= maxInt && "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT( "Type is not convertible to double" && false ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT( value_.int_ >= 0 && "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT( value_.real_ >= 0 && value_.real_ <= maxUInt && "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT( "Type is not convertible to double" && false ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT( "Type is not convertible to double" && false ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; + case arrayValue: // size of the 
array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { + case arrayValue: // size of the array is highest index + 1 + case objectValue: + value_.map_->clear(); + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +} + + +Value & +Value::operator[]( const char *key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); + CZString actualKey( key, CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? 
defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + Members members; + members.reserve( value_.map_->size() ); + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); + return members; +} + +# ifdef JSON_USE_CPPTL +EnumMemberNames +Value::enumMemberNames() const +{ + if ( type_ == objectValue ) + { + return CppTL::Enum::any( CppTL::Enum::transform( + CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), + MemberNamesTransform() ) ); + } + return EnumMemberNames(); +} + + +EnumValues +Value::enumValues() const +{ + if ( type_ == objectValue || type_ == arrayValue ) + return CppTL::Enum::anyValues( *(value_.map_), + CppTL::Type() ); + return EnumValues(); +} + +# endif + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement 
placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : kind_( kindIndex ) + , index_( index ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : kind_( kindKey ) + , key_( key ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : kind_( kindKey ) + , key_( key.c_str() ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... 
+ } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) + } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cc5932e --- /dev/null +++ b/src/lib_json/json_writer.cpp @@ -0,0 +1,410 @@ +#include +#include +#include +#include + +namespace Json { + +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + char *end = current; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Value::Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( Value::UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( Value::UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; + sprintf( buffer, "%.16g", value ); + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? 
"true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + return std::string("\"") + value + "\""; +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +std::string +FastWriter::write( const Value &root ) +{ + document_.resize(0); + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "[ "; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + writeValue( value[index] ); + } + document_ += " ]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{ "; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ", "; + document_ += valueToQuotedString( name.c_str() ); + document_ += " : "; + writeValue( value[name] ); + } + document_ += " }"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_.resize(0); + addChildValues_ = false; + indentString_.resize(0); + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + int size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + 
indent(); + bool hasChildValue = !childValues_.empty(); + int index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { 
+ char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +} // namespace Json diff --git a/src/lib_json/lib_json.vcproj b/src/lib_json/lib_json.vcproj new file mode 100644 index 0000000..cc96067 --- /dev/null +++ b/src/lib_json/lib_json.vcproj @@ -0,0 +1,202 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/lib_json/sconscript b/src/lib_json/sconscript new file mode 100644 index 0000000..cc044fb --- /dev/null +++ b/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibary' ) + +buildLibary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/test/cleantests.py b/test/cleantests.py new file mode 100644 index 0000000..5872a87 --- /dev/null +++ b/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( pattern ) + +for path in paths: + os.unlink( path ) diff --git a/test/generate_expected.py b/test/generate_expected.py new file mode 100644 index 0000000..a46e889 --- /dev/null +++ b/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/test/jsontestrunner.py b/test/jsontestrunner.py new file mode 100644 index 0000000..ec05a91 --- /dev/null +++ b/test/jsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.read( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.write( value ) + rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/test/runjsontests.py b/test/runjsontests.py new file mode 100644 index 0000000..a5284fb --- /dev/null +++ b/test/runjsontests.py @@ -0,0 +1,91 @@ +import sys +import os +import os.path +import glob + + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None ): + if not input_dir: + input_dir = os.getcwd() + tests = glob.glob( os.path.join( input_dir, '*.json' ) ) + failed_tests = [] + for input_path in tests: + print 'TESTING:', input_path, + pipe = os.popen( "%s %s" % (jsontest_executable_path, input_path) ) + process_output = pipe.read() + status = pipe.close() + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 
'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +if __name__ == '__main__': + if len(sys.argv) < 1 or len(sys.argv) > 2: + print "Usage: %s jsontest-executable-path [input-testcase-directory]" % sys.argv[0] + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( sys.argv[1] ) ) + if len(sys.argv) > 1: + input_path = os.path.normpath( os.path.abspath( sys.argv[2] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path ) + sys.exit( status ) \ No newline at end of file diff --git a/test/test_array_01.expected b/test/test_array_01.expected new file mode 100644 index 0000000..4aa8fb3 --- /dev/null +++ b/test/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/test/test_array_01.json b/test/test_array_01.json new file mode 100644 index 0000000..60b0742 --- /dev/null +++ b/test/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/test/test_array_02.expected b/test/test_array_02.expected new file mode 100644 index 0000000..5b7c72a --- /dev/null +++ b/test/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/test/test_array_02.json b/test/test_array_02.json new file mode 100644 index 0000000..c02be12 --- /dev/null +++ b/test/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/test/test_array_03.expected b/test/test_array_03.expected new file mode 100644 index 0000000..0ba568e --- /dev/null +++ b/test/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/test/test_array_03.json b/test/test_array_03.json new file mode 100644 index 0000000..ac8f422 --- /dev/null +++ b/test/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/test/test_array_04.expected b/test/test_array_04.expected new file mode 100644 index 0000000..db58c30 --- /dev/null +++ b/test/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/test/test_array_04.json b/test/test_array_04.json new file mode 100644 index 0000000..0755478 --- /dev/null +++ b/test/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/test/test_array_05.expected b/test/test_array_05.expected new file mode 100644 index 0000000..82ad484 --- /dev/null +++ b/test/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 
+.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/test/test_array_05.json b/test/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/test/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/test/test_array_06.expected b/test/test_array_06.expected new file mode 100644 index 0000000..e087b63 --- /dev/null +++ b/test/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/test/test_array_06.json b/test/test_array_06.json new file mode 100644 index 0000000..9777a64 --- /dev/null +++ b/test/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/test/test_basic_01.expected b/test/test_basic_01.expected new file mode 100644 index 0000000..0527387 --- /dev/null +++ b/test/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/test/test_basic_01.json b/test/test_basic_01.json new file mode 100644 index 0000000..57cf9b9 --- /dev/null +++ b/test/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/test/test_basic_02.expected b/test/test_basic_02.expected new file mode 100644 index 0000000..9040e84 --- /dev/null +++ b/test/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/test/test_basic_02.json b/test/test_basic_02.json new file mode 100644 index 0000000..fe84da4 --- /dev/null +++ b/test/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/test/test_basic_03.expected b/test/test_basic_03.expected new file mode 100644 index 0000000..494278d --- /dev/null +++ b/test/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/test/test_basic_03.json b/test/test_basic_03.json new file mode 100644 index 0000000..feac150 --- /dev/null +++ b/test/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/test/test_basic_04.expected b/test/test_basic_04.expected new file mode 100644 index 0000000..659f744 --- /dev/null +++ b/test/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/test/test_basic_04.json b/test/test_basic_04.json new file mode 100644 index 0000000..01374bd --- /dev/null +++ b/test/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/test/test_basic_05.expected b/test/test_basic_05.expected new file mode 100644 index 0000000..cb1cdad --- /dev/null +++ b/test/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/test/test_basic_05.json b/test/test_basic_05.json new file mode 100644 index 0000000..a6d4f5a --- /dev/null +++ b/test/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/test/test_basic_06.expected b/test/test_basic_06.expected new file mode 100644 index 0000000..8b22731 --- /dev/null +++ b/test/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/test/test_basic_06.json b/test/test_basic_06.json new file mode 100644 index 0000000..5d967af --- /dev/null +++ b/test/test_basic_06.json @@ 
-0,0 +1,2 @@ +true + diff --git a/test/test_basic_07.expected b/test/test_basic_07.expected new file mode 100644 index 0000000..4979ed5 --- /dev/null +++ b/test/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/test/test_basic_07.json b/test/test_basic_07.json new file mode 100644 index 0000000..b7ee6c5 --- /dev/null +++ b/test/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/test/test_basic_08.expected b/test/test_basic_08.expected new file mode 100644 index 0000000..cb1cdad --- /dev/null +++ b/test/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/test/test_basic_08.json b/test/test_basic_08.json new file mode 100644 index 0000000..fe107f4 --- /dev/null +++ b/test/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/test/test_basic_09.expected b/test/test_basic_09.expected new file mode 100644 index 0000000..cb1cdad --- /dev/null +++ b/test/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/test/test_basic_09.json b/test/test_basic_09.json new file mode 100644 index 0000000..e0cb089 --- /dev/null +++ b/test/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/test/test_complex_01.expected b/test/test_complex_01.expected new file mode 100644 index 0000000..44e753b --- /dev/null +++ b/test/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/test/test_complex_01.json b/test/test_complex_01.json new file mode 100644 index 0000000..fb2f86c --- /dev/null +++ b/test/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/test/test_integer_01.expected b/test/test_integer_01.expected new file mode 100644 index 0000000..24aa29e --- /dev/null +++ b/test/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/test/test_integer_01.json b/test/test_integer_01.json new file mode 100644 index 0000000..e82c7ad --- /dev/null +++ b/test/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/test/test_integer_02.expected b/test/test_integer_02.expected new file mode 100644 index 0000000..dab99eb --- /dev/null +++ b/test/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/test/test_integer_02.json b/test/test_integer_02.json new file mode 100644 index 0000000..548764e --- /dev/null +++ b/test/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/test/test_integer_03.expected b/test/test_integer_03.expected new file mode 100644 index 0000000..dde3260 --- /dev/null +++ b/test/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/test/test_integer_03.json b/test/test_integer_03.json new file mode 100644 index 0000000..18aeaf6 --- /dev/null +++ b/test/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/test/test_integer_04.expected b/test/test_integer_04.expected new file mode 100644 index 0000000..8da9013 --- /dev/null +++ b/test/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git 
a/test/test_integer_04.json b/test/test_integer_04.json new file mode 100644 index 0000000..8202483 --- /dev/null +++ b/test/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/test/test_integer_05.expected b/test/test_integer_05.expected new file mode 100644 index 0000000..238d1d6 --- /dev/null +++ b/test/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/test/test_integer_05.json b/test/test_integer_05.json new file mode 100644 index 0000000..4797790 --- /dev/null +++ b/test/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/test/test_object_01.expected b/test/test_object_01.expected new file mode 100644 index 0000000..8e0634e --- /dev/null +++ b/test/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/test/test_object_01.json b/test/test_object_01.json new file mode 100644 index 0000000..69a88e3 --- /dev/null +++ b/test/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/test/test_object_02.expected b/test/test_object_02.expected new file mode 100644 index 0000000..2c9de06 --- /dev/null +++ b/test/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/test/test_object_02.json b/test/test_object_02.json new file mode 100644 index 0000000..bd157ec --- /dev/null +++ b/test/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/test/test_object_03.expected b/test/test_object_03.expected new file mode 100644 index 0000000..235a28e --- /dev/null +++ b/test/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/test/test_object_03.json b/test/test_object_03.json new file mode 100644 index 0000000..0947a44 --- /dev/null +++ b/test/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/test/test_preserve_comment_01.expected b/test/test_preserve_comment_01.expected new file mode 100644 index 0000000..b5616a9 --- /dev/null +++ b/test/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/test/test_preserve_comment_01.json b/test/test_preserve_comment_01.json new file mode 100644 index 0000000..0291fff --- /dev/null +++ b/test/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/test/test_real_01.expected b/test/test_real_01.expected new file mode 100644 index 0000000..57dee39 --- /dev/null +++ b/test/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/test/test_real_01.json b/test/test_real_01.json new file mode 100644 index 0000000..5cb1bbf --- /dev/null +++ b/test/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/test/test_real_02.expected b/test/test_real_02.expected new file mode 100644 index 0000000..181592a --- /dev/null +++ b/test/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/test/test_real_02.json b/test/test_real_02.json new file mode 100644 index 0000000..45092ef --- /dev/null +++ b/test/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/test/test_real_03.expected b/test/test_real_03.expected new file mode 100644 index 0000000..181592a --- /dev/null +++ b/test/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/test/test_real_03.json b/test/test_real_03.json new file mode 100644 index 0000000..45092ef --- /dev/null +++ b/test/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/test/test_real_04.expected b/test/test_real_04.expected new file mode 100644 index 0000000..2f84bf1 --- /dev/null +++ b/test/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/test/test_real_04.json b/test/test_real_04.json new file mode 100644 index 0000000..7e71794 --- /dev/null +++ b/test/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/test/test_real_05.expected b/test/test_real_05.expected new file mode 100644 index 0000000..168f6e8 --- /dev/null +++ b/test/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/test/test_real_05.json b/test/test_real_05.json new file mode 100644 index 0000000..950f6a7 --- /dev/null +++ b/test/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/test/test_real_06.expected b/test/test_real_06.expected new file mode 100644 index 0000000..45906e3 --- /dev/null +++ b/test/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/test/test_real_06.json b/test/test_real_06.json new file mode 100644 index 0000000..dde1916 --- /dev/null +++ b/test/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/test/test_real_07.expected b/test/test_real_07.expected new file mode 100644 index 0000000..f2922f9 --- /dev/null +++ b/test/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/test/test_real_07.json b/test/test_real_07.json new file mode 100644 index 0000000..dd43ba7 --- /dev/null +++ b/test/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + From 1bdb902e30339629c949a85b2b85c7835c14bc85 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 3 Jun 2006 06:42:43 +0000 Subject: [PATCH 002/268] - Synchronized jsoncpp with integration in CppTL. - Dependencies of Json::Value on std::map is now optional. 
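For client code, the most visible part of this change is the public header rename listed below (include/json/json_config.h becomes config.h, json_value.h becomes value.h, and so on). A minimal sketch of the resulting include change, assuming code that included the renamed headers directly rather than through json.h:

    // Before this patch:
    //   #include <json/json_value.h>
    //   #include <json/json_reader.h>
    // After this patch:
    #include <json/value.h>
    #include <json/reader.h>
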
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@2 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- SConstruct | 2 +- include/json/autolink.h | 105 ----------------- include/json/{json_config.h => config.h} | 0 include/json/{json_forwards.h => forwards.h} | 0 include/json/{json_reader.h => reader.h} | 0 include/json/{json_util.h => util.h} | 0 include/json/{json_value.h => value.h} | 0 include/json/{json_writer.h => writer.h} | 0 .../jsontest.vcproj | 0 src/{jsontest => jsontestrunner}/main.cpp | 0 src/{jsontest => jsontestrunner}/sconscript | 0 src/lib_json/json_reader.cpp | 16 ++- src/lib_json/json_value.cpp | 106 +++++++++++++++++- src/lib_json/json_writer.cpp | 18 ++- 14 files changed, 125 insertions(+), 122 deletions(-) delete mode 100644 include/json/autolink.h rename include/json/{json_config.h => config.h} (100%) rename include/json/{json_forwards.h => forwards.h} (100%) rename include/json/{json_reader.h => reader.h} (100%) rename include/json/{json_util.h => util.h} (100%) rename include/json/{json_value.h => value.h} (100%) rename include/json/{json_writer.h => writer.h} (100%) rename src/{jsontest => jsontestrunner}/jsontest.vcproj (100%) rename src/{jsontest => jsontestrunner}/main.cpp (100%) rename src/{jsontest => jsontestrunner}/sconscript (100%) diff --git a/SConstruct b/SConstruct index 8e00aba..cc97787 100644 --- a/SConstruct +++ b/SConstruct @@ -133,5 +133,5 @@ RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) env.Alias( 'check' ) -buildProjectInDirectory( 'src/jsontest' ) +buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) diff --git a/include/json/autolink.h b/include/json/autolink.h deleted file mode 100644 index 8051945..0000000 --- a/include/json/autolink.h +++ /dev/null @@ -1,105 +0,0 @@ -// No gards, this header can be included multiple time - -// Generic header to automatically link against a specified library -// The library name prefix must be defined in CPPTL_AUTOLINK_NAME. -// CPPTL_AUTOLINK_NAME will be undefined after including this header. - -// The full library name is build according to the following rules: -// (0) CPPTL_AUTOLINK_NAME: library name prefix (json,...) -// (a) TOOLSET: vc6, vc70, vc71, vc80, bcb4, bcb5, bcb6 -// (b) LINKAGE: lib(static), dll(dynamic) -// The macro CPPTL_AUTOLINK_DLL must be defined to indicate that we are linking -// against a DLL. -// (c) This suffix depends on threading mode and CRT linkage -// This suffix follow Microsoft Visual Studio c++ compiler command-line option -// used to select the CRT library (/mt, /mtd...) -// Threading / Run-time library / suffix -// single / static / ml -// mutli-thread / static / mt -// multi-thread / dynamic library / md -// (e) DEBUG MODE: nothing (release), d(debug) -// FULLNAME: 0_(a)_bcd.lib -// Example: -// Compiling library "cpptl" with vc 7.1 as a static library with debug dll CRT (/MDD) -// "cpptl_vc71_libmdd" -#if !defined(CPPTL_AUTOLINK_NAME) -# error Macro CPPTL_AUTOLINK_NAME should be defined. You should not include this header directly. 
-#endif - -#undef CPPTL_AUTOLINK_TOOLSET_ -#undef CPPTL_AUTOLINK_CRT_ -#undef CPPTL_AUTOLINK_LINKAGE_ -#undef CPPTL_AUTOLINK_DEBUG_MODE_ - -// Select compiler -// Visual Studio -#if defined(_MSC_VER) -# if defined(_WIN32_WCE) -# define CPPTL_AUTOLINK_TOOLSET_ "evc4" -# elif (_MSC_VER < 1300) //VC6 -# define CPPTL_AUTOLINK_TOOLSET_ "vc6" -# elif (_MSC_VER < 1310) //VC7.0 (.NET 2002) -# define CPPTL_AUTOLINK_TOOLSET_ "vc70" -# elif (_MSC_VER < 1400) //VC7.1 (.NET 2003) -# define CPPTL_AUTOLINK_TOOLSET_ "vc71" -# else -# define CPPTL_AUTOLINK_TOOLSET_ "vc80" -# endif -// Borland C++ -#elif defined(__BORLANDC__) -# if (__BORLANDC__ >= 0x560) // CBuilder 6 -# define CPPTL_AUTOLINK_TOOLSET_ "bcb6" -# elif (__BORLANDC__ >= 0x550) -# define CPPTL_AUTOLINK_TOOLSET_ "bcb5" -# elif (__BORLANDC__ >= 0x540) -# define CPPTL_AUTOLINK_TOOLSET_ "bcb4" -# endif -#endif - -// Select CRT library: threading & linkage -#if defined(_MT) || defined(__MT__) -# if defined(_DLL) -# define CPPTL_AUTOLINK_CRT_ "md" -# else -# define CPPTL_AUTOLINK_CRT_ "mt" -# endif -#else -# define CPPTL_AUTOLINK_CRT_ "ml" -#endif - -// Select debug mode -#if defined(_DEBUG) -# define CPPTL_AUTOLINK_DEBUG_MODE_ "d" -#else -# define CPPTL_AUTOLINK_DEBUG_MODE_ "" -#endif - -// Select linkage -#if defined(CPPTL_AUTOLINK_DLL) -# define CPPTL_AUTOLINK_LINKAGE_ "dll" -#else -# define CPPTL_AUTOLINK_LINKAGE_ "lib" -#endif - -// Automatic link -#if defined(CPPTL_AUTOLINK_TOOLSET_) && \ - defined(CPPTL_AUTOLINK_CRT_) && \ - defined(CPPTL_AUTOLINK_LINKAGE_) && \ - defined(CPPTL_AUTOLINK_DEBUG_MODE_) -# define CPPTL_AUTOLINK_FULL_NAME \ - CPPTL_AUTOLINK_NAME "_" CPPTL_AUTOLINK_TOOLSET_ "_" CPPTL_AUTOLINK_LINKAGE_ \ - CPPTL_AUTOLINK_CRT_ CPPTL_AUTOLINK_DEBUG_MODE_ ".lib" -# pragma comment(lib,CPPTL_AUTOLINK_FULL_NAME) - -# if defined(CPPTL_AUTOLINK_VERBOSE) && defined(_MSC_VER) -# pragma message( "Linking with" CPPTL_AUTOLINK_FULL_NAME ) -# endif - -#endif - -#undef CPPTL_AUTOLINK_TOOLSET_ -#undef CPPTL_AUTOLINK_CRT_ -#undef CPPTL_AUTOLINK_LINKAGE_ -#undef CPPTL_AUTOLINK_DEBUG_MODE_ -#undef CPPTL_AUTOLINK_FULL_NAME - diff --git a/include/json/json_config.h b/include/json/config.h similarity index 100% rename from include/json/json_config.h rename to include/json/config.h diff --git a/include/json/json_forwards.h b/include/json/forwards.h similarity index 100% rename from include/json/json_forwards.h rename to include/json/forwards.h diff --git a/include/json/json_reader.h b/include/json/reader.h similarity index 100% rename from include/json/json_reader.h rename to include/json/reader.h diff --git a/include/json/json_util.h b/include/json/util.h similarity index 100% rename from include/json/json_util.h rename to include/json/util.h diff --git a/include/json/json_value.h b/include/json/value.h similarity index 100% rename from include/json/json_value.h rename to include/json/value.h diff --git a/include/json/json_writer.h b/include/json/writer.h similarity index 100% rename from include/json/json_writer.h rename to include/json/writer.h diff --git a/src/jsontest/jsontest.vcproj b/src/jsontestrunner/jsontest.vcproj similarity index 100% rename from src/jsontest/jsontest.vcproj rename to src/jsontestrunner/jsontest.vcproj diff --git a/src/jsontest/main.cpp b/src/jsontestrunner/main.cpp similarity index 100% rename from src/jsontest/main.cpp rename to src/jsontestrunner/main.cpp diff --git a/src/jsontest/sconscript b/src/jsontestrunner/sconscript similarity index 100% rename from src/jsontest/sconscript rename to src/jsontestrunner/sconscript diff 
--git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index 1720e54..b69118c 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -1,9 +1,13 @@ -#include -#include +#include +#include #include #include #include +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + namespace Json { static inline bool @@ -49,8 +53,8 @@ Reader::parse( const std::string &document, current_ = begin_; lastValueEnd_ = 0; lastValue_ = 0; - commentsBefore_.resize(0); - errors_.resize(0); + commentsBefore_ = ""; + errors_.clear(); while ( !nodes_.empty() ) nodes_.pop(); nodes_.push( &root ); @@ -74,7 +78,7 @@ Reader::readValue() if ( collectComments_ && !commentsBefore_.empty() ) { currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_.resize(0); + commentsBefore_ = ""; } @@ -360,7 +364,7 @@ Reader::readObject( Token &tokenStart ) if ( tokenName.type_ != tokenString ) break; - name.resize(0); + name = ""; if ( !decodeString( tokenName, name ) ) return recoverFromError( tokenObjectEnd ); diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 820fe36..3001ba0 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -1,5 +1,5 @@ -#include -#include +#include +#include #include #include "assert.h" #ifdef JSON_USE_CPPTL @@ -18,6 +18,75 @@ const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); const Value::UInt Value::maxUInt = Value::UInt(-1); + +const char * +Value::MemberIterator::deref() const +{ + CPPTL_ASSERT_MESSAGE( current_ != 0, + "SmallMapIterator: dereferencing invalid iterator" ); + return current_->first.c_str(); +} + + +void +Value::MemberIterator::increment() +{ + CPPTL_ASSERT_MESSAGE( map_ && ( current_ < map_->data_ + map_->size_ ), + "SmallMapIterator::increment: incrementing beyond end" ); + ++current_; +} + + +void +Value::MemberIterator::decrement() +{ + CPPTL_ASSERT_MESSAGE( map_ && ( current_ > map_->data_ ), + "SmallMapIterator::decrement: decrementing beyond beginning" ); + --current_; +} + + +void +Value::MemberIterator::advance( difference_type n ) +{ + CPPTL_ASSERT_MESSAGE( map_ && map_->size_ && + ( current_+n < map_->data_ + map_->size_ && current+n >= map_->data_), + "SmallMapIterator::advance: advancing beyond end or beginning" ); + current_ += n; +} + + +Value::MemberIterator::difference_type +Value::MemberIterator::computeDistance( const SelfType &other ) const +{ + CPPTL_ASSERT_MESSAGE( map_->data_ == other.map_->data_, "Comparing iterator on different container." 
); + return current_ - other.current_; +} + + +bool +Value::MemberIterator::isEqual( const SelfType &other ) const +{ + return current_ == other.current_; +} + + +bool +Value::MemberIterator::isLess( const SelfType &other ) const +{ + return current_ < other.current_; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + Value::CommentInfo::CommentInfo() : comment_( 0 ) { @@ -39,6 +108,13 @@ Value::CommentInfo::setComment( const char *text ) } +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// // Notes: index_ indicates if the string was allocated when // a string is stored. @@ -65,7 +141,7 @@ Value::CZString::CZString( const CZString &other ) Value::CZString::~CZString() { - if ( index_ == duplicate ) + if ( cstr_ && index_ == duplicate ) free( const_cast( cstr_ ) ); } @@ -115,11 +191,19 @@ Value::CZString::c_str() const } +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// Value::Value( ValueType type ) : type_( type ) , comments_( 0 ) + , allocated_( 0 ) { switch ( type ) { @@ -172,6 +256,7 @@ Value::Value( double value ) Value::Value( const char *value ) : type_( stringValue ) + , allocated_( true ) , comments_( 0 ) { value_.string_ = value ? strdup( value ) : 0; @@ -179,6 +264,7 @@ Value::Value( const char *value ) Value::Value( const std::string &value ) : type_( stringValue ) + , allocated_( true ) , comments_( 0 ) { value_.string_ = value.empty() ? 0 : strdup( value.c_str() ); @@ -187,6 +273,7 @@ Value::Value( const std::string &value ) # ifdef JSON_USE_CPPTL Value::Value( const CppTL::ConstString &value ) : type_( stringValue ) + , allocated_( true ) , comments_( 0 ) { value_.string_ = value.empty() ? 
0 : strdup( value.c_str() ); @@ -216,7 +303,10 @@ Value::Value( const Value &other ) break; case stringValue: if ( other.value_.string_ ) + { value_.string_ = strdup( other.value_.string_ ); + allocated_ = true; + } else value_.string_ = 0; break; @@ -252,7 +342,8 @@ Value::~Value() case booleanValue: break; case stringValue: - free( value_.string_ ); + if ( allocated_ ) + free( value_.string_ ); break; case arrayValue: case objectValue: @@ -277,8 +368,13 @@ Value::operator=( const Value &other ) void Value::swap( Value &other ) { - std::swap( type_, other.type_ ); + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; std::swap( value_, other.value_ ); + bool temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; } ValueType diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index cc5932e..f47e3f9 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -1,8 +1,12 @@ -#include +#include #include #include #include +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + namespace Json { static void uintToString( unsigned int value, @@ -45,7 +49,11 @@ std::string valueToString( Value::UInt value ) std::string valueToString( double value ) { char buffer[32]; - sprintf( buffer, "%.16g", value ); +#ifdef __STDC_SECURE_LIB__ // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%.16g", value); +#else + sprintf(buffer, "%.16g", value); +#endif return buffer; } @@ -67,7 +75,7 @@ std::string valueToQuotedString( const char *value ) std::string FastWriter::write( const Value &root ) { - document_.resize(0); + document_ = ""; writeValue( root ); document_ += "\n"; return document_; @@ -145,9 +153,9 @@ StyledWriter::StyledWriter() std::string StyledWriter::write( const Value &root ) { - document_.resize(0); + document_ = ""; addChildValues_ = false; - indentString_.resize(0); + indentString_ = ""; writeCommentBeforeValue( root ); writeValue( root ); writeCommentAfterValueOnSameLine( root ); From ef8b2eaa5a83c32ed055cfac0aed696bb6c95b8c Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 3 Jun 2006 06:44:06 +0000 Subject: [PATCH 003/268] - Synchronized jsoncpp with integration in CppTL. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@3 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/{json_autolink.h => autolink.h} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename include/json/{json_autolink.h => autolink.h} (100%) diff --git a/include/json/json_autolink.h b/include/json/autolink.h similarity index 100% rename from include/json/json_autolink.h rename to include/json/autolink.h From 1b9ce6f3cdd919f76bdc99a411911eba51963ec5 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 3 Jun 2006 07:13:50 +0000 Subject: [PATCH 004/268] - Synchronized jsoncpp with integration in CppTL. - Dependencies of Json::Value on std::map is now optional. 
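The optional std::map dependency is governed by the JSON_USE_CPPTL_SMALLMAP define that this patch adds, commented out, to include/json/config.h next to the existing JSON_USE_CPPTL define. A sketch of how the alternative container would be selected, assuming CppTL and its SmallMap container are available on the include path:

    // In include/json/config.h, uncomment the relevant defines:
    # define JSON_USE_CPPTL 1            // enable the CppTL integration
    # define JSON_USE_CPPTL_SMALLMAP 1   // store object members in CppTL::SmallMap instead of std::map
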
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@4 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- SConstruct | 2 +- include/json/autolink.h | 2 +- include/json/config.h | 1 + include/json/forwards.h | 2 +- include/json/json.h | 8 +- include/json/reader.h | 5 +- include/json/value.h | 198 ++++++++++++++++++++++++++++++----- include/json/writer.h | 5 +- makefiles/vs71/jsoncpp.sln | 2 +- src/lib_json/json_value.cpp | 115 ++++++++++---------- src/lib_json/lib_json.vcproj | 19 ++-- 11 files changed, 249 insertions(+), 110 deletions(-) diff --git a/SConstruct b/SConstruct index cc97787..9046f16 100644 --- a/SConstruct +++ b/SConstruct @@ -65,7 +65,7 @@ elif platform == 'msvc80': env['MSVS_VERSION']='8.0' for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' + env['CXXFLAGS']='-GR -EHsc /nologo /MT' elif platform == 'mingw': env.Tool( 'mingw' ) env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) diff --git a/include/json/autolink.h b/include/json/autolink.h index 49d7534..ef5e0ab 100644 --- a/include/json/autolink.h +++ b/include/json/autolink.h @@ -1,7 +1,7 @@ #ifndef JSON_AUTOLINK_H_INCLUDED # define JSON_AUTOLINK_H_INCLUDED -# include "json_config.h" +# include "config.h" # ifdef JSON_IN_CPPTL # include diff --git a/include/json/config.h b/include/json/config.h index e4d72d9..c05aa73 100644 --- a/include/json/config.h +++ b/include/json/config.h @@ -6,6 +6,7 @@ /// If defined, indicates that json may leverage CppTL library //# define JSON_USE_CPPTL 1 +//# define JSON_USE_CPPTL_SMALLMAP 1 # ifdef JSON_IN_CPPTL diff --git a/include/json/forwards.h b/include/json/forwards.h index 3f44026..e4d8727 100644 --- a/include/json/forwards.h +++ b/include/json/forwards.h @@ -1,7 +1,7 @@ #ifndef JSON_FORWARDS_H_INCLUDED # define JSON_FORWARDS_H_INCLUDED -# include "json_config.h" +# include "config.h" namespace Json { diff --git a/include/json/json.h b/include/json/json.h index e101c0d..c2a24ea 100644 --- a/include/json/json.h +++ b/include/json/json.h @@ -1,9 +1,9 @@ #ifndef JSON_JSON_H_INCLUDED # define JSON_JSON_H_INCLUDED -# include "json_autolink.h" -# include "json_value.h" -# include "json_reader.h" -# include "json_writer.h" +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" #endif // JSON_JSON_H_INCLUDED diff --git a/include/json/reader.h b/include/json/reader.h index d498ef8..27225f8 100644 --- a/include/json/reader.h +++ b/include/json/reader.h @@ -1,10 +1,9 @@ #ifndef CPPTL_JSON_READER_H_INCLUDED # define CPPTL_JSON_READER_H_INCLUDED -# include "json_forwards.h" -# include "json_value.h" +# include "forwards.h" +# include "value.h" # include -# include # include # include diff --git a/include/json/value.h b/include/json/value.h index 1fb1f82..d7c3c95 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -1,11 +1,15 @@ #ifndef CPPTL_JSON_H_INCLUDED # define CPPTL_JSON_H_INCLUDED -# include "json_forwards.h" +# include "forwards.h" # include -# include # include +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif # ifdef JSON_USE_CPPTL # include # endif @@ -79,6 +83,168 @@ namespace Json { static const Int minInt; static const Int maxInt; static const UInt maxUInt; + private: + + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other 
); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif + + public: + //class MemberIterator + //{ + //public: + // typedef unsigned int size_t; + // typedef int difference_type; + // typedef MemberIterator SelfType; + // typedef const char *reference; + // typedef const char **pointer; + + // MemberIterator(); + // explicit MemberIterator( ObjectValues::const_iterator current ); + + // bool operator ==( const SelfType &other ) const + // { + // return isEqual( other ); + // } + + // bool operator !=( const SelfType &other ) const + // { + // return !isEqual( other ); + // } + + // bool operator <( const SelfType &other ) const + // { + // return isLess( other ); + // } + + // bool operator <=( const SelfType &other ) const + // { + // return !other.isLess( *this ); + // } + + // bool operator >=( const SelfType &other ) const + // { + // return !isLess( other ); + // } + + // bool operator >( const SelfType &other ) const + // { + // return other.isLess( *this ); + // } + + // SelfType &operator++() + // { + // increment(); + // return *this; + // } + + // SelfType operator++( int ) + // { + // SelfType temp( *this ); + // ++*this; + // return temp; + // } + + // SelfType &operator--() + // { + // decrement(); + // return *this; + // } + + // SelfType operator--( int ) + // { + // SelfType temp( *this ); + // --*this; + // return temp; + // } + + // SelfType &operator +=( difference_type n ) + // { + // advance( n ); + // return *this; + // } + + // SelfType operator +( difference_type n ) const + // { + // SelfType temp( *this ); + // return temp += n; + // } + + // SelfType &operator -=( difference_type n ) + // { + // advance( -n ); + // return *this; + // } + + // SelfType operator -( difference_type n ) const + // { + // SelfType temp( *this ); + // return temp -= n; + // } + + // reference operator[]( difference_type n ) const + // { + // return *( *this + n ); + // } + + // reference operator *() const + // { + // return deref(); + // } + + // difference_type operator -( const SelfType &other ) const + // { + // return computeDistance( other ); + // } + + //private: + // const char *deref() const; + + // void increment(); + + // void decrement(); + + // void advance( difference_type n ); + + // difference_type computeDistance( const SelfType &other ) const; + + // bool isEqual( const SelfType &other ) const; + + // bool isLess( const SelfType &other ) const; + + //private: + // //ObjectValues::const_iterator begin_; + // //ObjectValues::const_iterator end_; + // ObjectValues::const_iterator current_; + //}; + + public: Value( ValueType type = nullValue ); Value( Int value ); @@ -219,31 +385,6 @@ namespace Json { char *comment_; }; - struct CZString - { - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - typedef std::map ObjectValues; - 
struct MemberNamesTransform { typedef const char *result_type; @@ -262,7 +403,8 @@ namespace Json { char *string_; ObjectValues *map_; } value_; - ValueType type_; + ValueType type_ : 8; + bool allocated_ : 1; CommentInfo *comments_; }; diff --git a/include/json/writer.h b/include/json/writer.h index 82b1a63..3abf2ea 100644 --- a/include/json/writer.h +++ b/include/json/writer.h @@ -1,9 +1,8 @@ #ifndef JSON_WRITER_H_INCLUDED # define JSON_WRITER_H_INCLUDED -# include "json_value.h" -//# include "json_reader.h" -# include +# include "value.h" +# include # include namespace Json { diff --git a/makefiles/vs71/jsoncpp.sln b/makefiles/vs71/jsoncpp.sln index fc18a80..613c267 100644 --- a/makefiles/vs71/jsoncpp.sln +++ b/makefiles/vs71/jsoncpp.sln @@ -3,7 +3,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "..\..\src\lib_j ProjectSection(ProjectDependencies) = postProject EndProjectSection EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "..\..\src\jsontest\jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "..\..\src\jsontestrunner\jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" ProjectSection(ProjectDependencies) = postProject {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} EndProjectSection diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 3001ba0..de7173f 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -9,6 +9,7 @@ #define JSON_ASSERT_UNREACHABLE assert( false ) #define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) assert( condition && message ); // @todo <= change this into an exception throw namespace Json { @@ -19,63 +20,63 @@ const Value::UInt Value::maxUInt = Value::UInt(-1); -const char * -Value::MemberIterator::deref() const -{ - CPPTL_ASSERT_MESSAGE( current_ != 0, - "SmallMapIterator: dereferencing invalid iterator" ); - return current_->first.c_str(); -} - - -void -Value::MemberIterator::increment() -{ - CPPTL_ASSERT_MESSAGE( map_ && ( current_ < map_->data_ + map_->size_ ), - "SmallMapIterator::increment: incrementing beyond end" ); - ++current_; -} - - -void -Value::MemberIterator::decrement() -{ - CPPTL_ASSERT_MESSAGE( map_ && ( current_ > map_->data_ ), - "SmallMapIterator::decrement: decrementing beyond beginning" ); - --current_; -} - - -void -Value::MemberIterator::advance( difference_type n ) -{ - CPPTL_ASSERT_MESSAGE( map_ && map_->size_ && - ( current_+n < map_->data_ + map_->size_ && current+n >= map_->data_), - "SmallMapIterator::advance: advancing beyond end or beginning" ); - current_ += n; -} - - -Value::MemberIterator::difference_type -Value::MemberIterator::computeDistance( const SelfType &other ) const -{ - CPPTL_ASSERT_MESSAGE( map_->data_ == other.map_->data_, "Comparing iterator on different container." 
); - return current_ - other.current_; -} - - -bool -Value::MemberIterator::isEqual( const SelfType &other ) const -{ - return current_ == other.current_; -} - - -bool -Value::MemberIterator::isLess( const SelfType &other ) const -{ - return current_ < other.current_; -} +//const char * +//Value::MemberIterator::deref() const +//{ +// JSON_ASSERT_MESSAGE( current_ != 0, +// "SmallMapIterator: dereferencing invalid iterator" ); +// return current_->first.c_str(); +//} +// +// +//void +//Value::MemberIterator::increment() +//{ +// JSON_ASSERT_MESSAGE( map_ && ( current_ < map_->data_ + map_->size_ ), +// "SmallMapIterator::increment: incrementing beyond end" ); +// ++current_; +//} +// +// +//void +//Value::MemberIterator::decrement() +//{ +// JSON_ASSERT_MESSAGE( map_ && ( current_ > map_->data_ ), +// "SmallMapIterator::decrement: decrementing beyond beginning" ); +// --current_; +//} +// +// +//void +//Value::MemberIterator::advance( difference_type n ) +//{ +// JSON_ASSERT_MESSAGE( map_ && map_->size_ && +// ( current_+n < map_->data_ + map_->size_ && current+n >= map_->data_), +// "SmallMapIterator::advance: advancing beyond end or beginning" ); +// current_ += n; +//} +// +// +//Value::MemberIterator::difference_type +//Value::MemberIterator::computeDistance( const SelfType &other ) const +//{ +// JSON_ASSERT_MESSAGE( map_->data_ == other.map_->data_, "Comparing iterator on different container." ); +// return current_ - other.current_; +//} +// +// +//bool +//Value::MemberIterator::isEqual( const SelfType &other ) const +//{ +// return current_ == other.current_; +//} +// +// +//bool +//Value::MemberIterator::isLess( const SelfType &other ) const +//{ +// return current_ < other.current_; +//} // ////////////////////////////////////////////////////////////////// diff --git a/src/lib_json/lib_json.vcproj b/src/lib_json/lib_json.vcproj index cc96067..048d828 100644 --- a/src/lib_json/lib_json.vcproj +++ b/src/lib_json/lib_json.vcproj @@ -167,34 +167,31 @@ RelativePath="..\..\include\json\autolink.h"> - - + RelativePath="..\..\include\json\config.h"> + RelativePath="..\..\include\json\forwards.h"> + RelativePath="..\..\include\json\json.h"> + RelativePath=".\json_value.cpp"> + RelativePath=".\json_writer.cpp"> + RelativePath="..\..\include\json\reader.h"> + RelativePath="..\..\include\json\value.h"> + RelativePath="..\..\include\json\writer.h"> From 8caa2efdbe76a28a221cc5ef796bb6daa28cc177 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 3 Jun 2006 12:31:37 +0000 Subject: [PATCH 005/268] - removed warning when compiling on msvc80. - added (untested) support for iteration over object values or array values. 
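The new iteration support exposes begin() and end() on Json::Value together with nested iterator and const_iterator types. A minimal usage sketch follows; it is illustrative only (the change itself marks the feature as untested), and printChildren and the surrounding code are assumptions rather than part of the patch:

    #include <json/json.h>
    #include <iostream>

    // Print every child of an object or array value. For other value
    // types begin() and end() compare equal, so the loop never runs.
    void printChildren( const Json::Value &root )
    {
       for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
          std::cout << (*it).toStyledString();
    }
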
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@5 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/value.h | 268 +++++++++++++++++---------------- src/lib_json/json_value.cpp | 285 ++++++++++++++++++++++++++++-------- 2 files changed, 361 insertions(+), 192 deletions(-) diff --git a/include/json/value.h b/include/json/value.h index d7c3c95..9166cfc 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -116,136 +116,142 @@ namespace Json { # endif public: - //class MemberIterator - //{ - //public: - // typedef unsigned int size_t; - // typedef int difference_type; - // typedef MemberIterator SelfType; - // typedef const char *reference; - // typedef const char **pointer; - - // MemberIterator(); - // explicit MemberIterator( ObjectValues::const_iterator current ); - - // bool operator ==( const SelfType &other ) const - // { - // return isEqual( other ); - // } - - // bool operator !=( const SelfType &other ) const - // { - // return !isEqual( other ); - // } - - // bool operator <( const SelfType &other ) const - // { - // return isLess( other ); - // } - - // bool operator <=( const SelfType &other ) const - // { - // return !other.isLess( *this ); - // } - - // bool operator >=( const SelfType &other ) const - // { - // return !isLess( other ); - // } - - // bool operator >( const SelfType &other ) const - // { - // return other.isLess( *this ); - // } - - // SelfType &operator++() - // { - // increment(); - // return *this; - // } - - // SelfType operator++( int ) - // { - // SelfType temp( *this ); - // ++*this; - // return temp; - // } - - // SelfType &operator--() - // { - // decrement(); - // return *this; - // } - - // SelfType operator--( int ) - // { - // SelfType temp( *this ); - // --*this; - // return temp; - // } - - // SelfType &operator +=( difference_type n ) - // { - // advance( n ); - // return *this; - // } - - // SelfType operator +( difference_type n ) const - // { - // SelfType temp( *this ); - // return temp += n; - // } - - // SelfType &operator -=( difference_type n ) - // { - // advance( -n ); - // return *this; - // } - - // SelfType operator -( difference_type n ) const - // { - // SelfType temp( *this ); - // return temp -= n; - // } - - // reference operator[]( difference_type n ) const - // { - // return *( *this + n ); - // } - - // reference operator *() const - // { - // return deref(); - // } - - // difference_type operator -( const SelfType &other ) const - // { - // return computeDistance( other ); - // } - - //private: - // const char *deref() const; - - // void increment(); - - // void decrement(); - - // void advance( difference_type n ); - - // difference_type computeDistance( const SelfType &other ) const; - - // bool isEqual( const SelfType &other ) const; - - // bool isLess( const SelfType &other ) const; - - //private: - // //ObjectValues::const_iterator begin_; - // //ObjectValues::const_iterator end_; - // ObjectValues::const_iterator current_; - //}; + class IteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef IteratorBase SelfType; - public: + IteratorBase(); + explicit IteratorBase( const ObjectValues::iterator ¤t ); + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + 
difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + void copy( const SelfType &other ); + + private: + ObjectValues::iterator current_; + }; + + class const_iterator : public IteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef const_iterator SelfType; + + const_iterator(); + explicit const_iterator( const ObjectValues::iterator ¤t ); + SelfType &operator =( const IteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + class iterator : public IteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef iterator SelfType; + + iterator(); + iterator( const const_iterator &other ); + iterator( const iterator &other ); + explicit iterator( const ObjectValues::iterator ¤t ); + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + public: Value( ValueType type = nullValue ); Value( Int value ); Value( UInt value ); @@ -374,6 +380,12 @@ namespace Json { std::string toStyledString() const; + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + private: struct CommentInfo { diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index de7173f..21dc54f 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -6,6 +6,7 @@ # include # include #endif +#include // size_t #define JSON_ASSERT_UNREACHABLE assert( false ) #define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw @@ -18,65 +19,159 @@ const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); const Value::UInt Value::maxUInt = Value::UInt(-1); +// Our "safe" implementation of strdup. Allow null pointer to be passed. 
+ +inline char *safeStringDup( const char *czstring ) +{ + if ( czstring ) + { + const size_t length = (unsigned int)( strlen(czstring) + 1 ); + char *newString = static_cast( malloc( length ) ); + memcpy( newString, czstring, length ); + return newString; + } + return 0; +} + +inline char *safeStringDup( const std::string &str ) +{ + if ( !str.empty() ) + { + const size_t length = str.length(); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, str.c_str(), length ); + newString[length] = 0; + return newString; + } + return 0; +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::IteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +Value::IteratorBase::IteratorBase() +{ +} + + +Value::IteratorBase::IteratorBase( const ObjectValues::iterator ¤t ) + : current_( current ) +{ +} + + +Value & +Value::IteratorBase::deref() const +{ + return current_->second; +} + + +void +Value::IteratorBase::increment() +{ + ++current_; +} -//const char * -//Value::MemberIterator::deref() const -//{ -// JSON_ASSERT_MESSAGE( current_ != 0, -// "SmallMapIterator: dereferencing invalid iterator" ); -// return current_->first.c_str(); -//} -// -// -//void -//Value::MemberIterator::increment() -//{ -// JSON_ASSERT_MESSAGE( map_ && ( current_ < map_->data_ + map_->size_ ), -// "SmallMapIterator::increment: incrementing beyond end" ); -// ++current_; -//} -// -// -//void -//Value::MemberIterator::decrement() -//{ -// JSON_ASSERT_MESSAGE( map_ && ( current_ > map_->data_ ), -// "SmallMapIterator::decrement: decrementing beyond beginning" ); -// --current_; -//} -// -// -//void -//Value::MemberIterator::advance( difference_type n ) -//{ -// JSON_ASSERT_MESSAGE( map_ && map_->size_ && -// ( current_+n < map_->data_ + map_->size_ && current+n >= map_->data_), -// "SmallMapIterator::advance: advancing beyond end or beginning" ); -// current_ += n; -//} -// -// -//Value::MemberIterator::difference_type -//Value::MemberIterator::computeDistance( const SelfType &other ) const -//{ -// JSON_ASSERT_MESSAGE( map_->data_ == other.map_->data_, "Comparing iterator on different container." 
); -// return current_ - other.current_; -//} -// -// -//bool -//Value::MemberIterator::isEqual( const SelfType &other ) const -//{ -// return current_ == other.current_; -//} -// -// -//bool -//Value::MemberIterator::isLess( const SelfType &other ) const -//{ -// return current_ < other.current_; -//} +void +Value::IteratorBase::decrement() +{ + --current_; +} + + +Value::IteratorBase::difference_type +Value::IteratorBase::computeDistance( const SelfType &other ) const +{ +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + return difference_type( std::distance( current_, other.current_ ) ); +# endif +} + + +bool +Value::IteratorBase::isEqual( const SelfType &other ) const +{ + return current_ == other.current_; +} + + +void +Value::IteratorBase::copy( const SelfType &other ) +{ + current_ = other.current_; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::const_iterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +Value::const_iterator::const_iterator() +{ +} + + +Value::const_iterator::const_iterator( const ObjectValues::iterator ¤t ) + : IteratorBase( current ) +{ +} + +Value::const_iterator::SelfType & +Value::const_iterator::operator =( const IteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::iterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +Value::iterator::iterator() +{ +} + + +Value::iterator::iterator( const ObjectValues::iterator ¤t ) + : IteratorBase( current ) +{ +} + +Value::iterator::iterator( const const_iterator &other ) + : IteratorBase( other ) +{ +} + +Value::iterator::iterator( const iterator &other ) + : IteratorBase( other ) +{ +} + +Value::iterator::SelfType & +Value::iterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + // ////////////////////////////////////////////////////////////////// @@ -105,7 +200,7 @@ Value::CommentInfo::setComment( const char *text ) { if ( comment_ ) free( comment_ ); - comment_ = text ? strdup( text ) : 0; + comment_ = safeStringDup( text ); } @@ -127,13 +222,13 @@ Value::CZString::CZString( int index ) } Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? strdup(cstr) : cstr ) + : cstr_( allocate == duplicate ? safeStringDup(cstr) : cstr ) , index_( allocate ) { } Value::CZString::CZString( const CZString &other ) - : cstr_( other.index_ != noDuplication && other.cstr_ != 0 ? strdup( other.cstr_ ) + : cstr_( other.index_ != noDuplication && other.cstr_ != 0 ? safeStringDup( other.cstr_ ) : other.cstr_ ) , index_( other.cstr_ ? (other.index_ == noDuplication ? noDuplication : duplicate) : other.index_ ) @@ -260,7 +355,7 @@ Value::Value( const char *value ) , allocated_( true ) , comments_( 0 ) { - value_.string_ = value ? 
strdup( value ) : 0; + value_.string_ = safeStringDup( value ); } Value::Value( const std::string &value ) @@ -268,7 +363,7 @@ Value::Value( const std::string &value ) , allocated_( true ) , comments_( 0 ) { - value_.string_ = value.empty() ? 0 : strdup( value.c_str() ); + value_.string_ = safeStringDup( value ); } # ifdef JSON_USE_CPPTL @@ -277,7 +372,7 @@ Value::Value( const CppTL::ConstString &value ) , allocated_( true ) , comments_( 0 ) { - value_.string_ = value.empty() ? 0 : strdup( value.c_str() ); + value_.string_ = safeStringDup( value ); } # endif @@ -305,7 +400,7 @@ Value::Value( const Value &other ) case stringValue: if ( other.value_.string_ ) { - value_.string_ = strdup( other.value_.string_ ); + value_.string_ = safeStringDup( other.value_.string_ ); allocated_ = true; } else @@ -1080,6 +1175,68 @@ Value::toStyledString() const } +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + // fall through default if no valid map + default: + return const_iterator(); + } +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + // fall through default if no valid map + default: + return const_iterator(); + } +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + // fall through default if no valid map + default: + return iterator(); + } +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + // fall through default if no valid map + default: + return iterator(); + } +} + + // class PathArgument // ////////////////////////////////////////////////////////////////// From e37ca476d35fc9de91689031b71daf80beb377a4 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 3 Jun 2006 17:47:12 +0000 Subject: [PATCH 006/268] - integrated doxygen documentation generation in scons build system - updated header.html to handle lastest doxygen (tabs in html) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@6 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- SConstruct | 9 +++ doc/doxyfile | 4 +- doc/header.html | 1 + doc/sconscript | 5 ++ scons-tools/doxygen.py | 176 +++++++++++++++++++++++++++++++++++++++++ 5 files changed, 193 insertions(+), 2 deletions(-) create mode 100644 doc/sconscript create mode 100644 scons-tools/doxygen.py diff --git a/SConstruct b/SConstruct index 9046f16..2ed8f31 100644 --- a/SConstruct +++ b/SConstruct @@ -31,6 +31,7 @@ if not os.path.exists( sconsign_dir_path ): SConsignFile( sconsign_path ) env = Environment( ENV = {'PATH' : os.environ['PATH']}, + toolpath = ['scons-tools'], tools=[] ) #, tools=['default'] ) if platform == 'suncc': @@ -127,11 +128,19 @@ def runJSONTests_action( target, source = None, env = None ): def runJSONTests_string( target, source = None, env = None ): return 'RunJSONTests("%s")' % source +##def buildDoc( doxyfile_path ): +## doc_cmd = env.Doxygen( doxyfile_path ) + import SCons.Action ActionFactory = SCons.Action.ActionFactory RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) env.Alias( 'check' ) +env.Tool('doxygen') + buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'doc' ) +##build_doc = ('doc' in 
COMMAND_LINE_TARGETS) or ('doc-dist' in COMMAND_LINE_TARGETS) +##if build_doc: diff --git a/doc/doxyfile b/doc/doxyfile index e3a4085..be000bd 100644 --- a/doc/doxyfile +++ b/doc/doxyfile @@ -5,7 +5,7 @@ #--------------------------------------------------------------------------- PROJECT_NAME = "JsonCpp" PROJECT_NUMBER = 0.0 -OUTPUT_DIRECTORY = docbuild +OUTPUT_DIRECTORY = ..\buildscons CREATE_SUBDIRS = NO OUTPUT_LANGUAGE = English USE_WINDOWS_ENCODING = NO @@ -114,7 +114,7 @@ IGNORE_PREFIX = # configuration options related to the HTML output #--------------------------------------------------------------------------- GENERATE_HTML = YES -HTML_OUTPUT = html +HTML_OUTPUT = json-html-doc HTML_FILE_EXTENSION = .html HTML_HEADER = header.html HTML_FOOTER = footer.html diff --git a/doc/header.html b/doc/header.html index cb4c006..2288b04 100644 --- a/doc/header.html +++ b/doc/header.html @@ -4,6 +4,7 @@ JsonCpp - JSON data format manipulation library + diff --git a/doc/sconscript b/doc/sconscript new file mode 100644 index 0000000..8025039 --- /dev/null +++ b/doc/sconscript @@ -0,0 +1,5 @@ +Import( 'env' ) + +doc_cmd = env.Doxygen('doxyfile') +Alias('doc', doc_cmd) +AlwaysBuild(doc_cmd) diff --git a/scons-tools/doxygen.py b/scons-tools/doxygen.py new file mode 100644 index 0000000..0a8056a --- /dev/null +++ b/scons-tools/doxygen.py @@ -0,0 +1,176 @@ +import os +import os.path +import glob +from fnmatch import fnmatch + +def DoxyfileParse(file_contents): + """ + Parse a Doxygen source file and return a dictionary of all the values. + Values will be strings and lists of strings. + """ + data = {} + + import shlex + lex = shlex.shlex(instream = file_contents, posix = True) + lex.wordchars += "*+./-" + lex.whitespace = lex.whitespace.replace("\n", "") + lex.escape = "" + + lineno = lex.lineno + token = lex.get_token() + key = token # the first token should be a key + last_token = "" + key_token = False + next_key = False + + while token: + if token in ['\n']: + if last_token not in ['\\']: + key_token = True + elif token in ['\\']: + pass + elif key_token: + key = token + key_token = False + else: + if token == "+=": + if not data.has_key(key): + data[key] = list() + elif token == "=": + data[key] = list() + else: + data[key].append(token) + + last_token = token + token = lex.get_token() + + # compress lists of len 1 into single strings + for (k, v) in data.items(): + if len(v) == 0: + data.pop(k) + + # items in the following list will be kept as lists and not converted to strings + if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: + continue + + if len(v) == 1: + data[k] = v[0] + + return data + +def DoxySourceScan(node, env, path): + """ + Doxygen Doxyfile source scanner. This should scan the Doxygen file and add + any files used to generate docs to the list of source files. 
+ """ + default_file_patterns = [ + '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', + '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', + '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', + '*.py', + ] + + default_exclude_patterns = [ + '*~', + ] + + sources = [] + + data = DoxyfileParse(node.get_contents()) + + if data.get("RECURSIVE", "NO") == "YES": + recursive = True + else: + recursive = False + + file_patterns = data.get("FILE_PATTERNS", default_file_patterns) + exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) + + for node in data.get("INPUT", []): + if os.path.isfile(node): + sources.add(node) + elif os.path.isdir(node): + if recursive: + for root, dirs, files in os.walk(node): + for f in files: + filename = os.path.join(root, f) + + pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) + exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) + + if pattern_check and not exclude_check: + sources.append(filename) + else: + for pattern in file_patterns: + sources.extend(glob.glob("/".join([node, pattern]))) + + return sources + + +def DoxySourceScanCheck(node, env): + """Check if we should scan this file""" + return os.path.isfile(node.path) + +def DoxyEmitter(source, target, env): + """Doxygen Doxyfile emitter""" + # possible output formats and their default values and output locations + output_formats = { + "HTML": ("YES", "html"), + "LATEX": ("YES", "latex"), + "RTF": ("NO", "rtf"), + "MAN": ("YES", "man"), + "XML": ("NO", "xml"), + } + + data = DoxyfileParse(source[0].get_contents()) + + targets = [] + out_dir = os.path.sep.join( data.get("OUTPUT_DIRECTORY", ".") ) + + # add our output locations + for (k, v) in output_formats.items(): + if data.get("GENERATE_" + k, v[0]) == "YES": + targets.append("/".join([out_dir, data.get(k + "_OUTPUT", v[1])])) + + # don't clobber targets + for node in targets: + env.Precious(node) + + # set up cleaning stuff + for node in targets: + env.Clean(node, node) + + return (targets, source) + +def generate(env): + """ + Add builders and construction variables for the + Doxygen tool. This is currently for Doxygen 1.4.6. + """ + doxyfile_scanner = env.Scanner( + DoxySourceScan, + "DoxySourceScan", + scan_check = DoxySourceScanCheck, + ) + + doxyfile_builder = env.Builder( + action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}"), + emitter = DoxyEmitter, + target_factory = env.fs.Entry, + single_source = True, + source_scanner = doxyfile_scanner, + ) + + env.Append(BUILDERS = { + 'Doxygen': doxyfile_builder, + }) + + env.AppendUnique( + DOXYGEN = 'doxygen', + ) + +def exists(env): + """ + Make sure doxygen exists. + """ + return env.Detect("doxygen") From ac1c1739027306a3b6276b7e6bd2d1497f51d61b Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 3 Jun 2006 17:48:17 +0000 Subject: [PATCH 007/268] - fixed compilation issue on msvc6 platform. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@7 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/forwards.h | 9 +- include/json/value.h | 291 +++++++++++++++++++----------------- src/jsontestrunner/main.cpp | 4 + src/lib_json/json_value.cpp | 55 +++---- 4 files changed, 192 insertions(+), 167 deletions(-) diff --git a/include/json/forwards.h b/include/json/forwards.h index e4d8727..b168fd2 100644 --- a/include/json/forwards.h +++ b/include/json/forwards.h @@ -6,11 +6,16 @@ namespace Json { class FastWriter; - class Path; - class PathArgument; class Reader; class StyledWriter; + + // value.h + class Path; + class PathArgument; class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; } // namespace Json diff --git a/include/json/value.h b/include/json/value.h index 9166cfc..5abbf65 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -16,8 +16,6 @@ namespace Json { - class Value; - /** \brief Type of the value held by a Value object. */ enum ValueType @@ -78,6 +76,8 @@ namespace Json { typedef std::vector Members; typedef int Int; typedef unsigned int UInt; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; static const Value null; static const Int minInt; @@ -109,148 +109,13 @@ namespace Json { int index_; }; + public: # ifndef JSON_USE_CPPTL_SMALLMAP typedef std::map ObjectValues; # else typedef CppTL::SmallMap ObjectValues; # endif - public: - class IteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef IteratorBase SelfType; - - IteratorBase(); - explicit IteratorBase( const ObjectValues::iterator ¤t ); - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: - ObjectValues::iterator current_; - }; - - class const_iterator : public IteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef const_iterator SelfType; - - const_iterator(); - explicit const_iterator( const ObjectValues::iterator ¤t ); - SelfType &operator =( const IteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - class iterator : public IteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef iterator SelfType; - - iterator(); - iterator( const const_iterator &other ); - iterator( const iterator &other ); - explicit iterator( const ObjectValues::iterator ¤t ); - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - 
- SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - public: Value( ValueType type = nullValue ); Value( Int value ); @@ -421,6 +286,156 @@ namespace Json { }; + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: + Value::ObjectValues::iterator current_; + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + /*! \internal Use by Value to create an iterator. + */ + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + /*! \internal Use by Value to create an iterator. + */ + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. 
*/ class PathArgument diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index c0d54b3..76c7c12 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -2,6 +2,10 @@ //#include #include +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + static std::string readInputTestFile( const char *path ) { diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 21dc54f..813cddc 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -19,7 +19,8 @@ const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); const Value::UInt Value::maxUInt = Value::UInt(-1); -// Our "safe" implementation of strdup. Allow null pointer to be passed. +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. inline char *safeStringDup( const char *czstring ) { @@ -49,45 +50,45 @@ inline char *safeStringDup( const std::string &str ) // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -// class Value::IteratorBase +// class ValueIteratorBase // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -Value::IteratorBase::IteratorBase() +ValueIteratorBase::ValueIteratorBase() { } -Value::IteratorBase::IteratorBase( const ObjectValues::iterator ¤t ) +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) : current_( current ) { } Value & -Value::IteratorBase::deref() const +ValueIteratorBase::deref() const { return current_->second; } void -Value::IteratorBase::increment() +ValueIteratorBase::increment() { ++current_; } void -Value::IteratorBase::decrement() +ValueIteratorBase::decrement() { --current_; } -Value::IteratorBase::difference_type -Value::IteratorBase::computeDistance( const SelfType &other ) const +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const { # ifdef JSON_USE_CPPTL_SMALLMAP return current_ - other.current_; @@ -98,14 +99,14 @@ Value::IteratorBase::computeDistance( const SelfType &other ) const bool -Value::IteratorBase::isEqual( const SelfType &other ) const +ValueIteratorBase::isEqual( const SelfType &other ) const { return current_ == other.current_; } void -Value::IteratorBase::copy( const SelfType &other ) +ValueIteratorBase::copy( const SelfType &other ) { current_ = other.current_; } @@ -114,23 +115,23 @@ Value::IteratorBase::copy( const SelfType &other ) // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -// class Value::const_iterator +// class ValueConstIterator // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -Value::const_iterator::const_iterator() +ValueConstIterator::ValueConstIterator() { } -Value::const_iterator::const_iterator( const ObjectValues::iterator ¤t ) - : IteratorBase( current ) 
+ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) { } -Value::const_iterator::SelfType & -Value::const_iterator::operator =( const IteratorBase &other ) +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) { copy( other ); return *this; @@ -140,33 +141,33 @@ Value::const_iterator::operator =( const IteratorBase &other ) // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -// class Value::iterator +// class ValueIterator // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -Value::iterator::iterator() +ValueIterator::ValueIterator() { } -Value::iterator::iterator( const ObjectValues::iterator ¤t ) - : IteratorBase( current ) +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) { } -Value::iterator::iterator( const const_iterator &other ) - : IteratorBase( other ) +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) { } -Value::iterator::iterator( const iterator &other ) - : IteratorBase( other ) +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) { } -Value::iterator::SelfType & -Value::iterator::operator =( const SelfType &other ) +ValueIterator & +ValueIterator::operator =( const SelfType &other ) { copy( other ); return *this; From 6f169fa4f50ccf29ce03100f75bf8eb06fc48ec4 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 3 Jun 2006 23:15:05 +0000 Subject: [PATCH 008/268] - fixed bug in doxygen scons-tools (bad handling of windows path in configuration file) - fixed bug in substinfile.py scons-tools (bad handling of windows path in substitution variable) - doc version and output directory are now set by the build script. 
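The doxyfile is now generated from a doxyfile.in template: the build script fills in placeholders such as %JSONCPP_VERSION%, %TOPDIR% and %DOC_TOPDIR% before Doxygen runs. As a rough standalone sketch of that substitution step (the real logic is the SubstInFile tool added in scons-tools/substinfile.py below; the file names and values here are only illustrative):

    import re

    def subst_in_file(source_path, target_path, subst_dict):
        # Replace every %KEY% placeholder with its value, in the spirit of SubstInFile.
        text = open(source_path, 'rb').read().decode('utf-8')
        for key, value in subst_dict.items():
            text = re.sub(key, value, text)
        open(target_path, 'wb').write(text.encode('utf-8'))

    # Illustrative call, mirroring what doc/sconscript sets up:
    subst_in_file('doc/doxyfile.in', 'doc/doxyfile',
                  {'%JSONCPP_VERSION%': '0.1',
                   '%TOPDIR%': '/path/to/jsoncpp',
                   '%DOC_TOPDIR%': 'buildscons'})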
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@8 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- SConstruct | 12 +++++- doc/{doxyfile => doxyfile.in} | 18 ++++---- doc/sconscript | 18 ++++++-- scons-tools/doxygen.py | 21 ++++++++-- scons-tools/substinfile.py | 79 +++++++++++++++++++++++++++++++++++ 5 files changed, 130 insertions(+), 18 deletions(-) rename doc/{doxyfile => doxyfile.in} (93%) create mode 100644 scons-tools/substinfile.py diff --git a/SConstruct b/SConstruct index 2ed8f31..772a2ce 100644 --- a/SConstruct +++ b/SConstruct @@ -2,6 +2,8 @@ import os import os.path import sys +JSONCPP_VERSION = '0.1' + options = Options() options.Add( EnumOption('platform', 'Platform (compiler/stl) used to build the project', @@ -17,6 +19,7 @@ except KeyError: print "Building using PLATFORM =", platform +rootbuild_dir = Dir('#buildscons') build_dir = os.path.join( '#buildscons', platform ) bin_dir = os.path.join( '#bin', platform ) lib_dir = os.path.join( '#libs', platform ) @@ -86,6 +89,9 @@ env['LIB_PLATFORM'] = short_platform env['LIB_LINK_TYPE'] = 'lib' # static env['LIB_CRUNTIME'] = 'mt' env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) env_testing = env.Copy( ) env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) @@ -138,9 +144,11 @@ RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) env.Alias( 'check' ) env.Tool('doxygen') +env.Tool('substinfile') +env.Tool('zip') + +env['JSONCPP_BUILD_DOC'] = ('doc' in COMMAND_LINE_TARGETS) or ('doc-dist' in COMMAND_LINE_TARGETS) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) buildProjectInDirectory( 'doc' ) -##build_doc = ('doc' in COMMAND_LINE_TARGETS) or ('doc-dist' in COMMAND_LINE_TARGETS) -##if build_doc: diff --git a/doc/doxyfile b/doc/doxyfile.in similarity index 93% rename from doc/doxyfile rename to doc/doxyfile.in index be000bd..be7011e 100644 --- a/doc/doxyfile +++ b/doc/doxyfile.in @@ -4,8 +4,8 @@ # Project related configuration options #--------------------------------------------------------------------------- PROJECT_NAME = "JsonCpp" -PROJECT_NUMBER = 0.0 -OUTPUT_DIRECTORY = ..\buildscons +PROJECT_NUMBER = %JSONCPP_VERSION% +OUTPUT_DIRECTORY = %DOC_TOPDIR% CREATE_SUBDIRS = NO OUTPUT_LANGUAGE = English USE_WINDOWS_ENCODING = NO @@ -25,7 +25,7 @@ ABBREVIATE_BRIEF = "The $name class" \ ALWAYS_DETAILED_SEC = NO INLINE_INHERITED_MEMB = NO FULL_PATH_NAMES = YES -STRIP_FROM_PATH = E:\prg\vc\Lib\jsoncpp +STRIP_FROM_PATH = %TOPDIR% STRIP_FROM_INC_PATH = SHORT_NAMES = NO JAVADOC_AUTOBRIEF = NO @@ -34,7 +34,7 @@ DETAILS_AT_TOP = NO INHERIT_DOCS = YES DISTRIBUTE_GROUP_DOC = NO SEPARATE_MEMBER_PAGES = NO -TAB_SIZE = 8 +TAB_SIZE = 3 ALIASES = OPTIMIZE_OUTPUT_FOR_C = NO OPTIMIZE_OUTPUT_JAVA = NO @@ -44,14 +44,14 @@ SUBGROUPING = YES #--------------------------------------------------------------------------- EXTRACT_ALL = YES EXTRACT_PRIVATE = NO -EXTRACT_STATIC = NO +EXTRACT_STATIC = YES EXTRACT_LOCAL_CLASSES = NO EXTRACT_LOCAL_METHODS = NO HIDE_UNDOC_MEMBERS = NO HIDE_UNDOC_CLASSES = NO HIDE_FRIEND_COMPOUNDS = NO HIDE_IN_BODY_DOCS = NO -INTERNAL_DOCS = NO +INTERNAL_DOCS = YES CASE_SENSE_NAMES = NO HIDE_SCOPE_NAMES = NO SHOW_INCLUDE_FILES = YES @@ -81,9 +81,9 @@ WARN_LOGFILE = jsoncpp-doxygen-warning.log #--------------------------------------------------------------------------- # configuration 
options related to the input files #--------------------------------------------------------------------------- -INPUT = ../include/json ../src/lib_json . +INPUT = ../include ../src/lib_json . FILE_PATTERNS = *.h *.cpp *.dox -RECURSIVE = NO +RECURSIVE = YES EXCLUDE = EXCLUDE_SYMLINKS = NO EXCLUDE_PATTERNS = @@ -114,7 +114,7 @@ IGNORE_PREFIX = # configuration options related to the HTML output #--------------------------------------------------------------------------- GENERATE_HTML = YES -HTML_OUTPUT = json-html-doc +HTML_OUTPUT = json-html-doc-%JSONCPP_VERSION% HTML_FILE_EXTENSION = .html HTML_HEADER = header.html HTML_FOOTER = footer.html diff --git a/doc/sconscript b/doc/sconscript index 8025039..9a63ec8 100644 --- a/doc/sconscript +++ b/doc/sconscript @@ -1,5 +1,17 @@ Import( 'env' ) +import os.path -doc_cmd = env.Doxygen('doxyfile') -Alias('doc', doc_cmd) -AlwaysBuild(doc_cmd) +if 'doxygen' in env['TOOLS']: + doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', + SUBST_DICT = { + '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'], + '%TOPDIR%' : env.Dir('#').abspath, + '%DOC_TOPDIR%' : str(env['ROOTBUILD_DIR'])} ) + doc_cmd = env.Doxygen( doxyfile ) + env.Alias('doc', doc_cmd) + env.AlwaysBuild(doc_cmd) + + for dir in doc_cmd: + filename = os.path.split(dir.path)[1] + zip_doc_cmd = env.Zip( '#dist/%s.zip' % filename, [env.Dir(dir)] ) + env.Alias( 'doc-dist', zip_doc_cmd ) diff --git a/scons-tools/doxygen.py b/scons-tools/doxygen.py index 0a8056a..20525ce 100644 --- a/scons-tools/doxygen.py +++ b/scons-tools/doxygen.py @@ -12,16 +12,24 @@ def DoxyfileParse(file_contents): import shlex lex = shlex.shlex(instream = file_contents, posix = True) - lex.wordchars += "*+./-" + lex.wordchars += "*+./-:" lex.whitespace = lex.whitespace.replace("\n", "") lex.escape = "" lineno = lex.lineno + last_backslash_lineno = lineno token = lex.get_token() key = token # the first token should be a key last_token = "" key_token = False next_key = False + new_data = True + + def append_data(data, key, new_data, token): + if new_data or len(data[key]) == 0: + data[key].append(token) + else: + data[key][-1] += token while token: if token in ['\n']: @@ -39,10 +47,15 @@ def DoxyfileParse(file_contents): elif token == "=": data[key] = list() else: - data[key].append(token) + append_data( data, key, new_data, token ) + new_data = True last_token = token token = lex.get_token() + + if last_token == '\\' and token != '\n': + new_data = False + append_data( data, key, new_data, '\\' ) # compress lists of len 1 into single strings for (k, v) in data.items(): @@ -103,7 +116,7 @@ def DoxySourceScan(node, env, path): else: for pattern in file_patterns: sources.extend(glob.glob("/".join([node, pattern]))) - + sources = map( lambda path: env.File(path), sources ) return sources @@ -125,7 +138,7 @@ def DoxyEmitter(source, target, env): data = DoxyfileParse(source[0].get_contents()) targets = [] - out_dir = os.path.sep.join( data.get("OUTPUT_DIRECTORY", ".") ) + out_dir = data.get("OUTPUT_DIRECTORY", ".") # add our output locations for (k, v) in output_formats.items(): diff --git a/scons-tools/substinfile.py b/scons-tools/substinfile.py new file mode 100644 index 0000000..2502262 --- /dev/null +++ b/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. 
+ + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True From d9d5211c036c714f37f52baedd14d3e1ffd92f8a Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 4 Jun 2006 08:42:55 +0000 Subject: [PATCH 009/268] - added simple targz tool. 
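The tool builds the archive with Python's tarfile and gzip modules instead of shelling out to tar. A minimal standalone equivalent of the effect (using tarfile's convenience API rather than the GzipFile/TarFile pair used in scons-tools/targz.py below; the paths are made up):

    import tarfile

    def make_targz(archive_path, paths):
        # Pack the given files/directories into a gzip-compressed tarball;
        # directories are added recursively by tarfile.
        tar = tarfile.open(archive_path, 'w:gz')
        for path in paths:
            tar.add(path)
        tar.close()

    # Illustrative usage:
    make_targz('dist/json-html-doc-0.1.tar.gz', ['buildscons/json-html-doc-0.1'])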
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@9 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- SConstruct | 2 +- doc/sconscript | 2 +- scons-tools/targz.py | 63 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 2 deletions(-) create mode 100644 scons-tools/targz.py diff --git a/SConstruct b/SConstruct index 772a2ce..ef44135 100644 --- a/SConstruct +++ b/SConstruct @@ -145,7 +145,7 @@ env.Alias( 'check' ) env.Tool('doxygen') env.Tool('substinfile') -env.Tool('zip') +env.Tool('targz') env['JSONCPP_BUILD_DOC'] = ('doc' in COMMAND_LINE_TARGETS) or ('doc-dist' in COMMAND_LINE_TARGETS) diff --git a/doc/sconscript b/doc/sconscript index 9a63ec8..1f3b1cc 100644 --- a/doc/sconscript +++ b/doc/sconscript @@ -13,5 +13,5 @@ if 'doxygen' in env['TOOLS']: for dir in doc_cmd: filename = os.path.split(dir.path)[1] - zip_doc_cmd = env.Zip( '#dist/%s.zip' % filename, [env.Dir(dir)] ) + zip_doc_cmd = env.TarGz( '#dist/%s.tar.gz' % filename, [env.Dir(dir)] ) env.Alias( 'doc-dist', zip_doc_cmd ) diff --git a/scons-tools/targz.py b/scons-tools/targz.py new file mode 100644 index 0000000..3d7fd5c --- /dev/null +++ b/scons-tools/targz.py @@ -0,0 +1,63 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for s in source: + if s.isdir(): + os.path.walk(str(s), visit, tar) + else: + tar.add(str(s)) # filename, arcname + tar.close() + +targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL']) + +TarGzBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + + +def generate(env): + """Add Builders and construction variables for zip to an Environment.""" + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + +def exists(env): + return internal_targz From 48598bf409380a2338724937d6308541095d96e2 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 4 Jun 2006 10:38:41 +0000 Subject: [PATCH 010/268] - doc tarball now contains just the documentation folder and not the build directory. 
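The fix works by archiving each file under a name relative to a configurable TARGZ_BASEDIR instead of under its absolute path, so the tarball no longer reproduces the whole build-directory layout. A small sketch of that idea (close to, but not identical to, the archive_name() helper added to scons-tools/targz.py below; the example paths are invented):

    import os

    def archive_name(path, base_dir):
        # Drop the part of the path shared with base_dir so the entry is
        # stored relative to base_dir rather than as an absolute path.
        path = os.path.normpath(os.path.abspath(path))
        base_dir = os.path.normpath(os.path.abspath(base_dir))
        common = os.path.commonprefix((base_dir, path))
        return path[len(common):].lstrip(os.sep)

    # e.g. archive_name('/work/jsoncpp/buildscons/json-html-doc-0.1/index.html',
    #                   '/work/jsoncpp/buildscons')
    # would yield 'json-html-doc-0.1/index.html'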
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@10 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- doc/sconscript | 6 ++++-- scons-tools/targz.py | 36 +++++++++++++++++++++++++----------- 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/doc/sconscript b/doc/sconscript index 1f3b1cc..2244113 100644 --- a/doc/sconscript +++ b/doc/sconscript @@ -2,16 +2,18 @@ Import( 'env' ) import os.path if 'doxygen' in env['TOOLS']: + doc_topdir = env['ROOTBUILD_DIR'] doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', SUBST_DICT = { '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'], '%TOPDIR%' : env.Dir('#').abspath, - '%DOC_TOPDIR%' : str(env['ROOTBUILD_DIR'])} ) + '%DOC_TOPDIR%' : str(doc_topdir) } ) doc_cmd = env.Doxygen( doxyfile ) env.Alias('doc', doc_cmd) env.AlwaysBuild(doc_cmd) for dir in doc_cmd: filename = os.path.split(dir.path)[1] - zip_doc_cmd = env.TarGz( '#dist/%s.tar.gz' % filename, [env.Dir(dir)] ) + zip_doc_cmd = env.TarGz( '#dist/%s.tar.gz' % filename, [env.Dir(dir)], + TARGZ_BASEDIR = doc_topdir ) env.Alias( 'doc-dist', zip_doc_cmd ) diff --git a/scons-tools/targz.py b/scons-tools/targz.py index 3d7fd5c..83549f3 100644 --- a/scons-tools/targz.py +++ b/scons-tools/targz.py @@ -4,7 +4,7 @@ """ -## +## Commands to tackle a command based implementation: ##to unpack on the fly... ##gunzip < FILE.tar.gz | tar xvf - ##to pack on the fly... @@ -26,24 +26,32 @@ TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 if internal_targz: - def targz(target, source, env): + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + def visit(tar, dirname, names): for name in names: path = os.path.join(dirname, name) if os.path.isfile(path): - tar.add(path) + tar.add(path, archive_name(path) ) compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) target_path = str(target[0]) fileobj = gzip.GzipFile( target_path, 'wb', compression ) tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for s in source: - if s.isdir(): - os.path.walk(str(s), visit, tar) - else: - tar.add(str(s)) # filename, arcname + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname tar.close() -targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL']) +targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) TarGzBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), source_factory = SCons.Node.FS.Entry, @@ -53,11 +61,17 @@ def visit(tar, dirname, names): def generate(env): - """Add Builders and construction variables for zip to an Environment.""" + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). 
+ """ env['BUILDERS']['TarGz'] = TarGzBuilder env['TARGZ_COM'] = targzAction env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. def exists(env): return internal_targz From a0a70c0e8ef035022476e23abb10c7b73f265c98 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 4 Jun 2006 11:32:56 +0000 Subject: [PATCH 011/268] - added short introduction and build instruction. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@11 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- README | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 README diff --git a/README b/README new file mode 100644 index 0000000..8db1cd7 --- /dev/null +++ b/README @@ -0,0 +1,38 @@ +* Introduction: + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, and handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + +* Building/Testing: + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + doc: build documentation + doc-dist: build documentation tarball + From 44a96e9fb137180ec113a0a453b51502683a1c42 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 5 Jun 2006 11:05:39 +0000 Subject: [PATCH 012/268] - doxygen tool: fixed bug in output (is env.Dir instead of File) - added srcdist tool: generate tarball of source files. Dependencies retreival is still missing. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@12 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- SConstruct | 27 ++++- doc/sconscript | 3 +- scons-tools/doxygen.py | 2 +- scons-tools/srcdist.py | 179 ++++++++++++++++++++++++++++++++++ scons-tools/targz.py | 15 +-- src/jsontestrunner/sconscript | 2 +- 6 files changed, 213 insertions(+), 15 deletions(-) create mode 100644 scons-tools/srcdist.py diff --git a/SConstruct b/SConstruct index ef44135..e9c507e 100644 --- a/SConstruct +++ b/SConstruct @@ -3,6 +3,7 @@ import os.path import sys JSONCPP_VERSION = '0.1' +DIST_DIR = '#dist' options = Options() options.Add( EnumOption('platform', @@ -80,6 +81,11 @@ else: print "UNSUPPORTED PLATFORM." 
env.Exit(1) +env.Tool('doxygen') +env.Tool('substinfile') +env.Tool('targz') +env.Tool('srcdist') + env.Append( CPPPATH = ['#include'], LIBPATH = lib_dir ) short_platform = platform @@ -92,6 +98,15 @@ env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # mu env['JSONCPP_VERSION'] = JSONCPP_VERSION env['BUILD_DIR'] = env.Dir(build_dir) env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) +env['SRCDIST_BUILDER'] = env.TarGz env_testing = env.Copy( ) env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) @@ -101,6 +116,7 @@ def buildJSONExample( env, target_sources, target_name ): env.Append( CPPPATH = ['#'] ) exe = env.Program( target=target_name, source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) global bin_dir return env.Install( bin_dir, exe ) @@ -114,6 +130,7 @@ def buildLibary( env, target_sources, target_name ): source=target_sources ) global lib_dir env.Install( lib_dir, static_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) Export( 'env env_testing buildJSONExample buildLibary buildJSONTests' ) @@ -122,6 +139,7 @@ def buildProjectInDirectory( target_directory ): target_build_dir = os.path.join( build_dir, target_directory ) target = os.path.join( target_directory, 'sconscript' ) SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) def runJSONTests_action( target, source = None, env = None ): @@ -143,11 +161,10 @@ RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) env.Alias( 'check' ) -env.Tool('doxygen') -env.Tool('substinfile') -env.Tool('targz') - -env['JSONCPP_BUILD_DOC'] = ('doc' in COMMAND_LINE_TARGETS) or ('doc-dist' in COMMAND_LINE_TARGETS) +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) diff --git a/doc/sconscript b/doc/sconscript index 2244113..3e4eebb 100644 --- a/doc/sconscript +++ b/doc/sconscript @@ -14,6 +14,7 @@ if 'doxygen' in env['TOOLS']: for dir in doc_cmd: filename = os.path.split(dir.path)[1] - zip_doc_cmd = env.TarGz( '#dist/%s.tar.gz' % filename, [env.Dir(dir)], + targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) + zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], TARGZ_BASEDIR = doc_topdir ) env.Alias( 'doc-dist', zip_doc_cmd ) diff --git a/scons-tools/doxygen.py b/scons-tools/doxygen.py index 20525ce..4bd71cf 100644 --- a/scons-tools/doxygen.py +++ b/scons-tools/doxygen.py @@ -143,7 +143,7 @@ def DoxyEmitter(source, target, env): # add our output locations for (k, v) in output_formats.items(): if data.get("GENERATE_" + k, v[0]) == "YES": - targets.append("/".join([out_dir, data.get(k + "_OUTPUT", v[1])])) + targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) # don't clobber targets for node in targets: diff --git a/scons-tools/srcdist.py b/scons-tools/srcdist.py new file mode 100644 index 0000000..cfc5407 --- /dev/null +++ b/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +import glob +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## 
""" +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. 
+ """ + return True diff --git a/scons-tools/targz.py b/scons-tools/targz.py index 83549f3..2f21204 100644 --- a/scons-tools/targz.py +++ b/scons-tools/targz.py @@ -52,13 +52,14 @@ def visit(tar, dirname, names): tar.close() targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - -TarGzBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - + +def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) +TarGzBuilder = makeBuilder() def generate(env): """Add Builders and construction variables for zip to an Environment. diff --git a/src/jsontestrunner/sconscript b/src/jsontestrunner/sconscript index bc46095..f81a2dc 100644 --- a/src/jsontestrunner/sconscript +++ b/src/jsontestrunner/sconscript @@ -3,4 +3,4 @@ Import( 'env_testing buildJSONTests' ) buildJSONTests( env_testing, Split( """ main.cpp """ ), - 'jsontest' ) + 'jsontestrunner' ) From 2b5ebb450a46f9ce797b499e4adc9908db1f7984 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 5 Jun 2006 12:26:11 +0000 Subject: [PATCH 013/268] - added another parse interface taking a pair of const char *. - removed unused header. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@13 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/reader.h | 12 ++++++++++++ include/json/util.h | 12 ------------ src/lib_json/json_reader.cpp | 16 +++++++++++++--- 3 files changed, 25 insertions(+), 15 deletions(-) delete mode 100644 include/json/util.h diff --git a/include/json/reader.h b/include/json/reader.h index 27225f8..60594d9 100644 --- a/include/json/reader.h +++ b/include/json/reader.h @@ -35,6 +35,18 @@ namespace Json { Value &root, bool collectComments = true ); + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + /** \brief Returns a user friendly string that list errors in the parsed document. * \return Formatted error message with the list of errors with their location in * the parsed document. 
An empty string is returned if no error occurred diff --git a/include/json/util.h b/include/json/util.h deleted file mode 100644 index 3f51ade..0000000 --- a/include/json/util.h +++ /dev/null @@ -1,12 +0,0 @@ -#ifndef JSON_UTIL_H_INCLUDED -# define JSON_UTIL_H_INCLUDED - -# include "json_config.h" -# include - -namespace Json { - - -} // namespace Json - -#endif // JSON_UTIL_H_INCLUDED diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index b69118c..cfdcfca 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -46,10 +46,20 @@ Reader::parse( const std::string &document, Value &root, bool collectComments ) { - collectComments_ = collectComments; document_ = document; - begin_ = document_.c_str(); - end_ = begin_ + document_.length(); + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; current_ = begin_; lastValueEnd_ = 0; lastValue_ = 0; From e6cff40836b9af35482839b6653acaf2e729d032 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 13 Jun 2006 06:14:55 +0000 Subject: [PATCH 014/268] - added instruction to download and install scons-local. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@14 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- README | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README b/README index 8db1cd7..528e7dd 100644 --- a/README +++ b/README @@ -17,6 +17,12 @@ Unserialization parsing is user friendly and provides precise error reports. JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires python to be installed (http://www.python.org). +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + python scons.py platform=PLTFRM [TARGET] where PLTFRM may be one of: suncc Sun C++ (Solaris) From 1db8bd8a8cccc540d1842cd9f98dd1dde466f557 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 18 Jun 2006 14:38:39 +0000 Subject: [PATCH 015/268] - added (untested) accessors to get index or member name when iterating over members. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@15 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/value.h | 12 +++++++++++- src/lib_json/json_value.cpp | 28 ++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/include/json/value.h b/include/json/value.h index 5abbf65..88e5889 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -72,6 +72,7 @@ namespace Json { */ class JSON_API Value { + friend class ValueIteratorBase; public: typedef std::vector Members; typedef int Int; @@ -83,8 +84,8 @@ namespace Json { static const Int minInt; static const Int maxInt; static const UInt maxUInt; - private: + private: class CZString { public: @@ -314,6 +315,15 @@ namespace Json { return computeDistance( other ); } + /// Returns either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Returns the index of the referenced Value. -1 if it is not an arrayValue. + Value::UInt index() const; + + /// Returns the member name of the referenced Value. "" if it is not an objectValue. 
+ const char *memberName() const; + protected: Value &deref() const; diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 813cddc..a21acea 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -112,6 +112,34 @@ ValueIteratorBase::copy( const SelfType &other ) } +Value +ValueIteratorBase::key() const +{ + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + return Value( czstring.c_str() ); + return Value( czstring.index() ); +} + + +Value::UInt +ValueIteratorBase::index() const +{ + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +} + + +const char * +ValueIteratorBase::memberName() const +{ + const char *name = (*current_).first.c_str(); + return name ? name : ""; +} + + // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// From a464fc774418aa049d995d819b6253f0c1fb73f9 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 18 Jun 2006 14:40:13 +0000 Subject: [PATCH 016/268] - correct include path are now generated in the documentation. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@16 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- doc/doxyfile.in | 2 +- scons-tools/doxygen.py | 28 ++++++++++++++++++++++------ 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/doc/doxyfile.in b/doc/doxyfile.in index be7011e..68589b9 100644 --- a/doc/doxyfile.in +++ b/doc/doxyfile.in @@ -26,7 +26,7 @@ ALWAYS_DETAILED_SEC = NO INLINE_INHERITED_MEMB = NO FULL_PATH_NAMES = YES STRIP_FROM_PATH = %TOPDIR% -STRIP_FROM_INC_PATH = +STRIP_FROM_INC_PATH = %TOPDIR%/include SHORT_NAMES = NO JAVADOC_AUTOBRIEF = NO MULTILINE_CPP_IS_BRIEF = NO diff --git a/scons-tools/doxygen.py b/scons-tools/doxygen.py index 4bd71cf..f85f4a3 100644 --- a/scons-tools/doxygen.py +++ b/scons-tools/doxygen.py @@ -1,3 +1,7 @@ +# Big issue: +# emitter depends on doxyfile which is generated from doxyfile.in. +# build fails after cleaning and relaunching the build. 
+ import os import os.path import glob @@ -99,10 +103,16 @@ def DoxySourceScan(node, env, path): file_patterns = data.get("FILE_PATTERNS", default_file_patterns) exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) + doxyfile_dir = str( node.dir ) + +## print 'running from', os.getcwd() for node in data.get("INPUT", []): - if os.path.isfile(node): - sources.add(node) - elif os.path.isdir(node): + node_real_path = os.path.normpath( os.path.join( doxyfile_dir, node ) ) + if os.path.isfile(node_real_path): +## print str(node), 'is a file' + sources.append(node) + elif os.path.isdir(node_real_path): +## print str(node), 'is a directory' if recursive: for root, dirs, files in os.walk(node): for f in files: @@ -113,9 +123,12 @@ def DoxySourceScan(node, env, path): if pattern_check and not exclude_check: sources.append(filename) +## print ' adding source', os.path.abspath( filename ) else: for pattern in file_patterns: - sources.extend(glob.glob("/".join([node, pattern]))) + sources.extend(glob.glob(os.path.join( node, pattern))) +## else: +## print str(node), 'is neither a file nor a directory' sources = map( lambda path: env.File(path), sources ) return sources @@ -135,6 +148,7 @@ def DoxyEmitter(source, target, env): "XML": ("NO", "xml"), } +## print '#### DoxyEmitter:', source[0].abspath, os.path.exists( source[0].abspath ) data = DoxyfileParse(source[0].get_contents()) targets = [] @@ -151,7 +165,8 @@ def DoxyEmitter(source, target, env): # set up cleaning stuff for node in targets: - env.Clean(node, node) + clean_cmd = env.Clean(node, node) + env.Depends( clean_cmd, source ) return (targets, source) @@ -167,7 +182,8 @@ def generate(env): ) doxyfile_builder = env.Builder( - action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}"), + action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}", + varlist=['$SOURCES']), emitter = DoxyEmitter, target_factory = env.fs.Entry, single_source = True, From d564d3ebb2eec4d0e30fd5f72b733b4ab8bcd5d5 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 18 Jun 2006 14:46:48 +0000 Subject: [PATCH 017/268] - memory footprint of Json::Value is back to 16 bytes on WIN32. (using bool allocated_:1 caused the compiler not to merge the attribute with the previous bitfield. Switched to int type to fix the problem). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@17 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/value.h | 2 +- src/lib_json/json_value.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/include/json/value.h b/include/json/value.h index 88e5889..ac5935d 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -282,7 +282,7 @@ namespace Json { ObjectValues *map_; } value_; ValueType type_ : 8; - bool allocated_ : 1; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. CommentInfo *comments_; }; diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index a21acea..cef11b7 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -497,7 +497,7 @@ Value::swap( Value &other ) type_ = other.type_; other.type_ = temp; std::swap( value_, other.value_ ); - bool temp2 = allocated_; + int temp2 = allocated_; allocated_ = other.allocated_; other.allocated_ = temp2; } From c600d986b012abf0d8eec2e0f1d442c65177fa92 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 24 Jun 2006 11:35:07 +0000 Subject: [PATCH 018/268] - Value may now (optionally) use an internal hash-map and a simplified deque for objectValue and arrayValue storage. 
- iterator related code extracted into src/lib_json/json_valueiterator.inl git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@18 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/config.h | 5 + include/json/forwards.h | 7 + include/json/value.h | 447 ++++++++++++++++++++----- src/jsontestrunner/main.cpp | 3 +- src/lib_json/json_internalarray.inl | 364 ++++++++++++++++++++ src/lib_json/json_internalmap.inl | 495 ++++++++++++++++++++++++++++ src/lib_json/json_value.cpp | 427 ++++++++++++++---------- src/lib_json/json_valueiterator.inl | 249 ++++++++++++++ src/lib_json/lib_json.vcproj | 9 + 9 files changed, 1739 insertions(+), 267 deletions(-) create mode 100644 src/lib_json/json_internalarray.inl create mode 100644 src/lib_json/json_internalmap.inl create mode 100644 src/lib_json/json_valueiterator.inl diff --git a/include/json/config.h b/include/json/config.h index c05aa73..88ac572 100644 --- a/include/json/config.h +++ b/include/json/config.h @@ -6,7 +6,12 @@ /// If defined, indicates that json may leverage CppTL library //# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. //# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +//# define JSON_VALUE_USE_INTERNAL_MAP 1 # ifdef JSON_IN_CPPTL diff --git a/include/json/forwards.h b/include/json/forwards.h index b168fd2..1e61cc0 100644 --- a/include/json/forwards.h +++ b/include/json/forwards.h @@ -16,6 +16,13 @@ namespace Json { class ValueIteratorBase; class ValueIterator; class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP } // namespace Json diff --git a/include/json/value.h b/include/json/value.h index ac5935d..916bb83 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -73,12 +73,17 @@ namespace Json { class JSON_API Value { friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif public: typedef std::vector Members; typedef int Int; typedef unsigned int UInt; typedef ValueIterator iterator; typedef ValueConstIterator const_iterator; + typedef UInt ArrayIndex; static const Value null; static const Int minInt; @@ -86,6 +91,7 @@ namespace Json { static const UInt maxUInt; private: +# ifndef JSON_VALUE_USE_INTERNAL_MAP class CZString { public: @@ -111,11 +117,12 @@ namespace Json { }; public: -# ifndef JSON_USE_CPPTL_SMALLMAP +# ifndef JSON_USE_CPPTL_SMALLMAP typedef std::map ObjectValues; -# else +# else typedef CppTL::SmallMap ObjectValues; -# endif +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP public: Value( ValueType type = nullValue ); @@ -232,10 +239,10 @@ namespace Json { // Returns a list of the member names. 
Members getMemberNames() const; -# ifdef JSON_USE_CPPTL - EnumMemberNames enumMemberNames() const; - EnumValues enumValues() const; -# endif +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif void setComment( const char *comment, CommentPlacement placement ); @@ -252,6 +259,19 @@ namespace Json { iterator begin(); iterator end(); + private: +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + private: struct CommentInfo { @@ -263,14 +283,14 @@ namespace Json { char *comment_; }; - struct MemberNamesTransform - { - typedef const char *result_type; - const char *operator()( const CZString &name ) const - { - return name.c_str(); - } - }; + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; union ValueHolder { @@ -279,14 +299,313 @@ namespace Json { double real_; bool bool_; char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else ObjectValues *map_; +# endif } value_; ValueType type_ : 8; int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. +# endif CommentInfo *comments_; }; + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( Value::UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + Value::UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + class ValueAllocator + { + public: + enum { unknown = -1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName() = 0; + virtual void releaseMemberName() = 0; + virtual char *duplicateValue( const char *value, unsigned int length = unknown ) = 0; + virtual void releaseValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + + virtual ValueInternalLink *allocateBuckets( unsigned int size ) = 0; + virtual void releaseBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateLink() = 0; + virtual void releaseLink( ValueInternalLink *link ) = 0; + }; + + /** \brief Link of hash-map used to store arrayValue and objectValue. + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) + { + } + + ~ValueInternalLink() + { // assumes there is only memberName + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + free( keys_[index] ); + else + break; + } + } + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + + struct IteratorState + { + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; + + /* When the bucket is allocated, one page is immediately allocated for each bucket. + Each bucket is made up of a chained list of ValueInternalLink. The last + link of a given bucket can be found in the 'previous_' field of the following bucket. + The last link of the last bucket is stored in tailLink_ as it has no following bucket. + Only the last link of a bucket may contains 'available' item. The last link always + contains at least one element unless is it the bucket one very first link. 
+ */ + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, ValueInternalLink *link, BucketIndex index ); + + Value &unsafeAdd( const char *key, HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + + struct IteratorState // Must be a POD + { + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + /** \brief Experimental and untested: base class for Value iterators. * */ @@ -298,7 +617,12 @@ namespace Json { typedef ValueIteratorBase SelfType; ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif bool operator ==( const SelfType &other ) const { @@ -338,7 +662,16 @@ namespace Json { void copy( const SelfType &other ); private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP Value::ObjectValues::iterator current_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif }; /** \brief Experimental and untested: const iterator for object and array value. @@ -346,6 +679,7 @@ namespace Json { */ class ValueConstIterator : public ValueIteratorBase { + friend class Value; public: typedef unsigned int size_t; typedef int difference_type; @@ -354,9 +688,16 @@ namespace Json { typedef ValueConstIterator SelfType; ValueConstIterator(); + private: /*! \internal Use by Value to create an iterator. 
*/ +#ifndef JSON_VALUE_USE_INTERNAL_MAP explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: SelfType &operator =( const ValueIteratorBase &other ); SelfType operator++( int ) @@ -396,6 +737,7 @@ namespace Json { */ class ValueIterator : public ValueIteratorBase { + friend class Value; public: typedef unsigned int size_t; typedef int difference_type; @@ -406,9 +748,16 @@ namespace Json { ValueIterator(); ValueIterator( const ValueConstIterator &other ); ValueIterator( const ValueIterator &other ); + private: /*! \internal Use by Value to create an iterator. */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: SelfType &operator =( const SelfType &other ); @@ -445,74 +794,6 @@ namespace Json { }; - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( Value::UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - Value::UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - } // namespace Json diff --git a/src/jsontestrunner/main.cpp b/src/jsontestrunner/main.cpp index 76c7c12..1d43720 100644 --- a/src/jsontestrunner/main.cpp +++ b/src/jsontestrunner/main.cpp @@ -1,5 +1,5 @@ #include -//#include +#include // sort #include #if defined(_MSC_VER) && _MSC_VER >= 1310 @@ -65,6 +65,7 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) { fprintf( fout, "%s={}\n", path.c_str() ); Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; for ( Json::Value::Members::iterator it = members.begin(); it != members.end(); diff --git a/src/lib_json/json_internalarray.inl b/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..6fc5084 --- /dev/null +++ b/src/lib_json/json_internalarray.inl @@ -0,0 +1,364 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/src/lib_json/json_internalmap.inl b/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..9956d30 --- /dev/null +++ b/src/lib_json/json_internalmap.inl @@ -0,0 +1,495 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalLink *allocateBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateLink() + { + return new ValueInternalLink(); + } + + virtual void releaseLink( ValueInternalLink *link ) + { + delete link; + } +}; + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). 
+ } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. (memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const char *memberName = key( it ); + const Value &aValue = value( it ); + resolveReference(memberName) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseLink( linkToRelease ); + } + } + mapAllocator()->releaseBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + 
BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, ValueInternalLink *link, BucketIndex index ) +{ + char *duplicatedKey = safeStringDup( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index cef11b7..8416e68 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -4,7 +4,6 @@ #include "assert.h" #ifdef JSON_USE_CPPTL # include -# include #endif #include // size_t @@ -19,6 +18,12 @@ const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); const Value::UInt Value::maxUInt = Value::UInt(-1); +ValueAllocator::~ValueAllocator() +{ +} + + + // A "safe" implementation of strdup. Allow null pointer to be passed. // Also avoid warning on msvc80. @@ -47,160 +52,20 @@ inline char *safeStringDup( const std::string &str ) return 0; } -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -{ -} - - -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) -{ -} - - -Value & -ValueIteratorBase::deref() const -{ - return current_->second; -} - - -void -ValueIteratorBase::increment() -{ - ++current_; -} - - -void -ValueIteratorBase::decrement() -{ - --current_; -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other ) const -{ -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - return difference_type( std::distance( current_, other.current_ ) ); -# endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ - return current_ == other.current_; -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ - current_ = other.current_; -} - - -Value -ValueIteratorBase::key() const -{ - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - return Value( czstring.c_str() ); - return Value( czstring.index() ); -} - - -Value::UInt -ValueIteratorBase::index() const -{ - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -} - - -const char * -ValueIteratorBase::memberName() const -{ - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -} - // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -// class ValueConstIterator +// ValueInternals... // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator() -{ -} - - -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} - +# include "json_valueiterator.inl" // ////////////////////////////////////////////////////////////////// @@ -240,6 +105,7 @@ Value::CommentInfo::setComment( const char *text ) // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP // Notes: index_ indicates if the string was allocated when // a string is stored. 
@@ -314,6 +180,7 @@ Value::CZString::c_str() const { return cstr_; } +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP // ////////////////////////////////////////////////////////////////// @@ -324,11 +191,13 @@ Value::CZString::c_str() const // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// - Value::Value( ValueType type ) : type_( type ) , comments_( 0 ) , allocated_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { switch ( type ) { @@ -344,10 +213,19 @@ Value::Value( ValueType type ) case stringValue: value_.string_ = 0; break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: case objectValue: value_.map_ = new ObjectValues(); break; +#else + case arrayValue: + value_.array_ = new ValueInternalArray(); + break; + case objectValue: + value_.map_ = new ValueInternalMap(); + break; +#endif case booleanValue: value_.bool_ = false; break; @@ -360,6 +238,9 @@ Value::Value( ValueType type ) Value::Value( Int value ) : type_( intValue ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { value_.int_ = value; } @@ -368,6 +249,9 @@ Value::Value( Int value ) Value::Value( UInt value ) : type_( uintValue ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { value_.uint_ = value; } @@ -375,6 +259,9 @@ Value::Value( UInt value ) Value::Value( double value ) : type_( realValue ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { value_.real_ = value; } @@ -383,6 +270,9 @@ Value::Value( const char *value ) : type_( stringValue ) , allocated_( true ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { value_.string_ = safeStringDup( value ); } @@ -391,6 +281,9 @@ Value::Value( const std::string &value ) : type_( stringValue ) , allocated_( true ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { value_.string_ = safeStringDup( value ); @@ -400,6 +293,9 @@ Value::Value( const CppTL::ConstString &value ) : type_( stringValue ) , allocated_( true ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { value_.string_ = safeStringDup( value ); } @@ -408,6 +304,9 @@ Value::Value( const CppTL::ConstString &value ) Value::Value( bool value ) : type_( booleanValue ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { value_.bool_ = value; } @@ -416,6 +315,9 @@ Value::Value( bool value ) Value::Value( const Value &other ) : type_( other.type_ ) , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif { switch ( type_ ) { @@ -435,11 +337,19 @@ Value::Value( const Value &other ) else value_.string_ = 0; break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: case objectValue: value_.map_ = new ObjectValues( *other.value_.map_ ); - // @todo for each, reset parent... 
break; +#else + case arrayValue: + value_.array_ = new ValueInternalArray( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = new ValueInternalMap( *other.value_.map_ ); + break; +#endif default: JSON_ASSERT_UNREACHABLE; } @@ -470,10 +380,19 @@ Value::~Value() if ( allocated_ ) free( value_.string_ ); break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: case objectValue: delete value_.map_; break; +#else + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + break; +#endif default: JSON_ASSERT_UNREACHABLE; } @@ -563,6 +482,7 @@ Value::operator <( const Value &other ) const || ( other.value_.string_ && value_.string_ && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: case objectValue: { @@ -571,6 +491,12 @@ Value::operator <( const Value &other ) const return delta < 0; return (*value_.map_) < (*other.value_.map_); } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif default: JSON_ASSERT_UNREACHABLE; } @@ -617,10 +543,17 @@ Value::operator ==( const Value &other ) const || ( other.value_.string_ && value_.string_ && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: case objectValue: return value_.map_->size() == other.value_.map_->size() && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif default: JSON_ASSERT_UNREACHABLE; } @@ -841,6 +774,7 @@ Value::size() const case booleanValue: case stringValue: return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: // size of the array is highest index + 1 if ( !value_.map_->empty() ) { @@ -851,6 +785,12 @@ Value::size() const return 0; case objectValue: return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif default: JSON_ASSERT_UNREACHABLE; } @@ -865,9 +805,19 @@ Value::clear() switch ( type_ ) { - case arrayValue: // size of the array is highest index + 1 +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: case objectValue: value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif default: break; } @@ -879,6 +829,7 @@ Value::resize( UInt newSize ) JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); if ( type_ == nullValue ) *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP UInt oldSize = size(); if ( newSize == 0 ) clear(); @@ -890,6 +841,9 @@ Value::resize( UInt newSize ) value_.map_->erase( index ); assert( size() == newSize ); } +#else + value_.array_->resize( newSize ); +#endif } @@ -899,6 +853,7 @@ Value::operator[]( UInt index ) JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); if ( type_ == nullValue ) *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP CZString key( index ); ObjectValues::iterator it = value_.map_->lower_bound( key ); if ( it != value_.map_->end() && (*it).first == key ) @@ -907,6 +862,9 @@ Value::operator[]( UInt index ) ObjectValues::value_type defaultValue( key, null ); it = value_.map_->insert( it, defaultValue ); return (*it).second; +#else + return value_.array_->resolveReference( index ); 
+#endif } @@ -916,11 +874,16 @@ Value::operator[]( UInt index ) const JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); if ( type_ == nullValue ) return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP CZString key( index ); ObjectValues::const_iterator it = value_.map_->find( key ); if ( it == value_.map_->end() ) return null; return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif } @@ -930,6 +893,7 @@ Value::operator[]( const char *key ) JSON_ASSERT( type_ == nullValue || type_ == objectValue ); if ( type_ == nullValue ) *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP CZString actualKey( key, CZString::duplicateOnCopy ); ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); if ( it != value_.map_->end() && (*it).first == actualKey ) @@ -939,6 +903,9 @@ Value::operator[]( const char *key ) it = value_.map_->insert( it, defaultValue ); Value &value = (*it).second; return value; +#else + return value_.map_->resolveReference( key ); +#endif } @@ -965,11 +932,16 @@ Value::operator[]( const char *key ) const JSON_ASSERT( type_ == nullValue || type_ == objectValue ); if ( type_ == nullValue ) return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP CZString actualKey( key, CZString::noDuplication ); ObjectValues::const_iterator it = value_.map_->find( actualKey ); if ( it == value_.map_->end() ) return null; return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? *value : null; +#endif } @@ -1065,37 +1037,46 @@ Value::getMemberNames() const JSON_ASSERT( type_ == nullValue || type_ == objectValue ); Members members; members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP ObjectValues::const_iterator it = value_.map_->begin(); ObjectValues::const_iterator itEnd = value_.map_->end(); for ( ; it != itEnd; ++it ) members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif return members; } - -# ifdef JSON_USE_CPPTL -EnumMemberNames -Value::enumMemberNames() const -{ - if ( type_ == objectValue ) - { - return CppTL::Enum::any( CppTL::Enum::transform( - CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), - MemberNamesTransform() ) ); - } - return EnumMemberNames(); -} - - -EnumValues -Value::enumValues() const -{ - if ( type_ == objectValue || type_ == arrayValue ) - return CppTL::Enum::anyValues( *(value_.map_), - CppTL::Type() ); - return EnumValues(); -} - -# endif +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# endif bool @@ -1209,14 +1190,34 @@ Value::begin() const { switch ( type_ ) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return 
const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else case arrayValue: case objectValue: if ( value_.map_ ) return const_iterator( value_.map_->begin() ); - // fall through default if no valid map + break; +#endif default: - return const_iterator(); + break; } + return const_iterator(); } Value::const_iterator @@ -1224,14 +1225,34 @@ Value::end() const { switch ( type_ ) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else case arrayValue: case objectValue: if ( value_.map_ ) return const_iterator( value_.map_->end() ); - // fall through default if no valid map + break; +#endif default: - return const_iterator(); + break; } + return const_iterator(); } @@ -1240,14 +1261,34 @@ Value::begin() { switch ( type_ ) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else case arrayValue: case objectValue: if ( value_.map_ ) return iterator( value_.map_->begin() ); - // fall through default if no valid map + break; +#endif default: - return iterator(); + break; } + return iterator(); } Value::iterator @@ -1255,14 +1296,34 @@ Value::end() { switch ( type_ ) { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else case arrayValue: case objectValue: if ( value_.map_ ) return iterator( value_.map_->end() ); - // fall through default if no valid map + break; +#endif default: - return iterator(); + break; } + return iterator(); } diff --git a/src/lib_json/json_valueiterator.inl b/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..cae9bea --- /dev/null +++ b/src/lib_json/json_valueiterator.inl @@ -0,0 +1,249 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + 
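// Illustrative sketch, not from the original patch: the discriminated-union layout these
// two constructors populate, reduced to standalone code. ArrayState and MapState are
// placeholder PODs standing in for ValueInternalArray::IteratorState and
// ValueInternalMap::IteratorState; isArray_ records which union member is active.
struct ArrayState { unsigned int item; };           // stand-in for the array iterator state
struct MapState   { unsigned int bucket, item; };   // stand-in for the map iterator state

class TaggedIteratorSketch
{
public:
   explicit TaggedIteratorSketch( const ArrayState &state ) : isArray_( true )
   {
      iterator_.array_ = state;
   }
   explicit TaggedIteratorSketch( const MapState &state ) : isArray_( false )
   {
      iterator_.map_ = state;
   }

   bool refersToArray() const { return isArray_; }   // dispatch helpers test this flag

private:
   union
   {
      ArrayState array_;
      MapState map_;
   } iterator_;
   bool isArray_;
};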
+ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + return difference_type( std::distance( current_, other.current_ ) ); +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + return Value( czstring.c_str() ); + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value( ValueInternalMap::key( iterator_.map_ ) ); +#endif +} + + +Value::UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/src/lib_json/lib_json.vcproj b/src/lib_json/lib_json.vcproj index 048d828..aa64706 100644 --- a/src/lib_json/lib_json.vcproj +++ b/src/lib_json/lib_json.vcproj @@ -175,12 +175,21 @@ + + + + + + From 58bd259a1833da861aa67a74541accb8918ae79b Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 24 Jun 2006 12:36:50 +0000 Subject: [PATCH 019/268] - added experimental StaticString support to avoid useless string duplication in Value constructor or member association. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@19 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/config.h | 1 + include/json/forwards.h | 1 + include/json/value.h | 114 ++++++++++++++++++++++++---- src/lib_json/json_internalmap.inl | 32 ++++++-- src/lib_json/json_value.cpp | 39 +++++++++- src/lib_json/json_valueiterator.inl | 10 ++- 6 files changed, 170 insertions(+), 27 deletions(-) diff --git a/include/json/config.h b/include/json/config.h index 88ac572..e1122d6 100644 --- a/include/json/config.h +++ b/include/json/config.h @@ -11,6 +11,7 @@ //# define JSON_USE_CPPTL_SMALLMAP 1 /// If defined, indicates that Json specific container should be used /// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! //# define JSON_VALUE_USE_INTERNAL_MAP 1 diff --git a/include/json/forwards.h b/include/json/forwards.h index 1e61cc0..704d6e4 100644 --- a/include/json/forwards.h +++ b/include/json/forwards.h @@ -10,6 +10,7 @@ namespace Json { class StyledWriter; // value.h + class StaticString; class Path; class PathArgument; class Value; diff --git a/include/json/value.h b/include/json/value.h index 916bb83..378ed3d 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -38,10 +38,46 @@ namespace Json { numberOfCommentPlacement }; -# ifdef JSON_USE_CPPTL - typedef CppTL::AnyEnumerator EnumMemberNames; - typedef CppTL::AnyEnumerator EnumValues; -# endif +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to identify static string. + * + * Value constructor and objectValue member assignement take advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; /** \brief Represents a JSON value. * @@ -110,6 +146,7 @@ namespace Json { bool operator==( const CZString &other ) const; int index() const; const char *c_str() const; + bool isStaticString() const; private: void swap( CZString &other ); const char *cstr_; @@ -130,6 +167,16 @@ namespace Json { Value( UInt value ); Value( double value ); Value( const char *value ); + /** \brief Constructs a value from a static string. + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); Value( const std::string &value ); # ifdef JSON_USE_CPPTL Value( const CppTL::ConstString &value ); @@ -202,18 +249,29 @@ namespace Json { /// Equivalent to jsonvalue[jsonvalue.size()] = value; Value &append( const Value &value ); - // Access an object value by name, create a null member if it does not exist. + /// Access an object value by name, create a null member if it does not exist. Value &operator[]( const char *key ); - // Access an object value by name, returns null if there is no member with that name. 
+ /// Access an object value by name, returns null if there is no member with that name. const Value &operator[]( const char *key ) const; - // Access an object value by name, create a null member if it does not exist. + /// Access an object value by name, create a null member if it does not exist. Value &operator[]( const std::string &key ); - // Access an object value by name, returns null if there is no member with that name. + /// Access an object value by name, returns null if there is no member with that name. const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); # ifdef JSON_USE_CPPTL - // Access an object value by name, create a null member if it does not exist. + /// Access an object value by name, create a null member if it does not exist. Value &operator[]( const CppTL::ConstString &key ); - // Access an object value by name, returns null if there is no member with that name. + /// Access an object value by name, returns null if there is no member with that name. const Value &operator[]( const CppTL::ConstString &key ) const; # endif /// Returns the member named key if it exist, defaultValue otherwise. @@ -260,6 +318,9 @@ namespace Json { iterator end(); private: + Value &resolveReference( const char *key, + bool isStatic ); + # ifdef JSON_VALUE_USE_INTERNAL_MAP inline bool isItemAvailable() const { @@ -270,6 +331,16 @@ namespace Json { { itemIsUsed_ = isUsed ? 1 : 0; } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } # endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP private: @@ -310,6 +381,7 @@ namespace Json { int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. # ifdef JSON_VALUE_USE_INTERNAL_MAP unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. 
# endif CommentInfo *comments_; }; @@ -391,7 +463,7 @@ namespace Json { virtual ~ValueAllocator(); virtual char *makeMemberName() = 0; - virtual void releaseMemberName() = 0; + virtual void releaseMemberName( char * ) = 0; virtual char *duplicateValue( const char *value, unsigned int length = unknown ) = 0; virtual void releaseValue( char *value ) = 0; }; @@ -429,11 +501,14 @@ namespace Json { } ~ValueInternalLink() - { // assumes there is only memberName + { for ( int index =0; index < itemPerLink; ++index ) { if ( !items_[index].isItemAvailable() ) - free( keys_[index] ); + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } else break; } @@ -489,7 +564,8 @@ namespace Json { Value *find( const char *key ); - Value &resolveReference( const char *key ); + Value &resolveReference( const char *key, + bool isStatic ); void remove( const char *key ); @@ -499,9 +575,14 @@ namespace Json { ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - Value &setNewItem( const char *key, ValueInternalLink *link, BucketIndex index ); + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); - Value &unsafeAdd( const char *key, HashKey hashedKey ); + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); HashKey hash( const char *key ) const; @@ -515,6 +596,7 @@ namespace Json { static void incrementBucket( IteratorState &iterator ); static void decrement( IteratorState &iterator ); static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); static Value &value( const IteratorState &iterator ); static int distance( const IteratorState &x, const IteratorState &y ); diff --git a/src/lib_json/json_internalmap.inl b/src/lib_json/json_internalmap.inl index 9956d30..89c637d 100644 --- a/src/lib_json/json_internalmap.inl +++ b/src/lib_json/json_internalmap.inl @@ -85,9 +85,10 @@ ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) other.makeEndIterator( itEnd ); for ( ; !equals(it,itEnd); increment(it) ) { - const char *memberName = key( it ); + bool isStatic; + const char *memberName = key( it, isStatic ); const Value &aValue = value( it ); - resolveReference(memberName) = aValue; + resolveReference(memberName, isStatic) = aValue; } } @@ -204,7 +205,8 @@ ValueInternalMap::find( const char *key ) Value & -ValueInternalMap::resolveReference( const char *key ) +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) { HashKey hashedKey = hash( key ); if ( bucketsSize_ ) @@ -219,7 +221,7 @@ ValueInternalMap::resolveReference( const char *key ) for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) { if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, current, index ); + return setNewItem( key, isStatic, current, index ); if ( strcmp( key, current->keys_[index] ) == 0 ) return current->items_[index]; } @@ -227,7 +229,7 @@ ValueInternalMap::resolveReference( const char *key ) } reserveDelta( 1 ); - return unsafeAdd( key, hashedKey ); + return unsafeAdd( key, isStatic, hashedKey ); } @@ -312,18 +314,24 @@ ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) Value & -ValueInternalMap::setNewItem( const char *key, ValueInternalLink *link, BucketIndex index ) +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) { char *duplicatedKey = safeStringDup( key ); ++itemCount_; link->keys_[index] = duplicatedKey; 
link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); return link->items_[index]; // items already default constructed. } Value & -ValueInternalMap::unsafeAdd( const char *key, HashKey hashedKey ) +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) { JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." ); BucketIndex bucketIndex = hashedKey % bucketsSize_; @@ -343,7 +351,7 @@ ValueInternalMap::unsafeAdd( const char *key, HashKey hashedKey ) previousLink = newLink; link = newLink; } - return setNewItem( key, link, index ); + return setNewItem( key, isStatic, link, index ); } @@ -475,6 +483,14 @@ ValueInternalMap::key( const IteratorState &iterator ) return iterator.link_->keys_[iterator.itemIndex_]; } +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + Value & ValueInternalMap::value( const IteratorState &iterator ) diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 8416e68..3b3b7bf 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -180,6 +180,13 @@ Value::CZString::c_str() const { return cstr_; } + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + #endif // ifndef JSON_VALUE_USE_INTERNAL_MAP @@ -288,6 +295,19 @@ Value::Value( const std::string &value ) value_.string_ = safeStringDup( value ); } + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + # ifdef JSON_USE_CPPTL Value::Value( const CppTL::ConstString &value ) : type_( stringValue ) @@ -889,12 +909,21 @@ Value::operator[]( UInt index ) const Value & Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) { JSON_ASSERT( type_ == nullValue || type_ == objectValue ); if ( type_ == nullValue ) *this = Value( objectValue ); #ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::duplicateOnCopy ); + CZString actualKey( key, isStatic ? 
CZString::noDuplication + : CZString::duplicateOnCopy ); ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); if ( it != value_.map_->end() && (*it).first == actualKey ) return (*it).second; @@ -904,7 +933,7 @@ Value::operator[]( const char *key ) Value &value = (*it).second; return value; #else - return value_.map_->resolveReference( key ); + return value_.map_->resolveReference( key, isStatic ); #endif } @@ -958,6 +987,12 @@ Value::operator[]( const std::string &key ) const return (*this)[ key.c_str() ]; } +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + # ifdef JSON_USE_CPPTL Value & diff --git a/src/lib_json/json_valueiterator.inl b/src/lib_json/json_valueiterator.inl index cae9bea..c84c47d 100644 --- a/src/lib_json/json_valueiterator.inl +++ b/src/lib_json/json_valueiterator.inl @@ -123,12 +123,20 @@ ValueIteratorBase::key() const #ifndef JSON_VALUE_USE_INTERNAL_MAP const Value::CZString czstring = (*current_).first; if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); return Value( czstring.c_str() ); + } return Value( czstring.index() ); #else if ( isArray_ ) return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value( ValueInternalMap::key( iterator_.map_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); #endif } From 86c3c77a6340fe4c1202fa17017fecdf49c152aa Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 24 Jun 2006 14:15:57 +0000 Subject: [PATCH 020/268] - added memory pool based allocator. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@20 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/config.h | 5 ++ include/json/value.h | 129 ++++++++++++++++++++++++++-- src/lib_json/json_batchallocator.h | 121 ++++++++++++++++++++++++++ src/lib_json/json_internalarray.inl | 84 ++++++++++++++++++ src/lib_json/json_internalmap.inl | 88 +++++++++++++++++-- src/lib_json/json_value.cpp | 19 ++-- src/lib_json/lib_json.vcproj | 3 + 7 files changed, 428 insertions(+), 21 deletions(-) create mode 100644 src/lib_json/json_batchallocator.h diff --git a/include/json/config.h b/include/json/config.h index e1122d6..c1bcb3f 100644 --- a/include/json/config.h +++ b/include/json/config.h @@ -13,6 +13,11 @@ /// (hash table & simple deque container with customizable allocator). /// THIS FEATURE IS STILL EXPERIMENTAL! //# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 # ifdef JSON_IN_CPPTL diff --git a/include/json/value.h b/include/json/value.h index 378ed3d..447e7c2 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -470,16 +470,60 @@ namespace Json { #ifdef JSON_VALUE_USE_INTERNAL_MAP /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). 
+ * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode */ class JSON_API ValueMapAllocator { public: virtual ~ValueMapAllocator(); - - virtual ValueInternalLink *allocateBuckets( unsigned int size ) = 0; - virtual void releaseBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateLink() = 0; - virtual void releaseLink( ValueInternalLink *link ) = 0; + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; }; /** \brief Link of hash-map used to store arrayValue and objectValue. @@ -494,6 +538,9 @@ namespace Json { flagUsed = 1 }; + /** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. + */ ValueInternalLink() : previous_( 0 ) , next_( 0 ) @@ -671,12 +718,82 @@ namespace Json { }; /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). 
+ \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode */ class JSON_API ValueArrayAllocator { public: virtual ~ValueArrayAllocator(); - + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ virtual void reallocateArrayPageIndex( Value **&indexes, ValueInternalArray::PageIndex &indexCount, ValueInternalArray::PageIndex minNewIndexCount ) = 0; diff --git a/src/lib_json/json_batchallocator.h b/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..5db9db4 --- /dev/null +++ b/src/lib_json/json_batchallocator.h @@ -0,0 +1,121 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType AllocatedType; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : objectsPerPage_( objectsPerPage ) + , freeHead_( 0 ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. 
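      // A sketch of the intended usage pattern for this allocator (the same pattern
      // the pool-based allocators below follow); the type T is illustrative:
      //
      //   BatchAllocator<T, 1> allocator;
      //   T *object = allocator.allocate();   // raw, uninitialized storage
      //   new (object) T();                   // caller constructs with placement new
      //   ...
      //   object->~T();                       // caller destructs explicitly
      //   allocator.release( object );        // storage goes back on the free list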
+ assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. + { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/src/lib_json/json_internalarray.inl b/src/lib_json/json_internalarray.inl index 6fc5084..73bc9ad 100644 --- a/src/lib_json/json_internalarray.inl +++ b/src/lib_json/json_internalarray.inl @@ -16,6 +16,7 @@ ValueArrayAllocator::~ValueArrayAllocator() // ////////////////////////////////////////////////////////////////// // class DefaultValueArrayAllocator // ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR class DefaultValueArrayAllocator : public ValueArrayAllocator { public: // overridden from ValueArrayAllocator @@ -23,6 +24,21 @@ public: // overridden from ValueArrayAllocator { } + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + virtual void reallocateArrayPageIndex( Value **&indexes, ValueInternalArray::PageIndex &indexCount, ValueInternalArray::PageIndex minNewIndexCount ) @@ -55,6 +71,74 @@ 
public: // overridden from ValueArrayAllocator } }; +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + static ValueArrayAllocator *&arrayAllocator() { static DefaultValueArrayAllocator defaultAllocator; diff --git a/src/lib_json/json_internalmap.inl b/src/lib_json/json_internalmap.inl index 89c637d..7809f58 100644 --- a/src/lib_json/json_internalmap.inl +++ b/src/lib_json/json_internalmap.inl @@ -13,29 +13,99 @@ ValueMapAllocator::~ValueMapAllocator() { } +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR class DefaultValueMapAllocator : public ValueMapAllocator { public: // overridden from ValueMapAllocator - virtual ValueInternalLink *allocateBuckets( unsigned int size ) + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) { return new ValueInternalLink[size]; } - virtual void releaseBuckets( ValueInternalLink *links ) + virtual void releaseMapBuckets( ValueInternalLink *links ) { delete [] links; } - virtual ValueInternalLink *allocateLink() + virtual ValueInternalLink *allocateMapLink() { return new ValueInternalLink(); } - virtual void releaseLink( ValueInternalLink *link ) + virtual void releaseMapLink( ValueInternalLink *link ) { delete link; } }; +#else +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap 
*map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif static ValueMapAllocator *&mapAllocator() { @@ -113,10 +183,10 @@ ValueInternalMap::~ValueInternalMap() { ValueInternalLink *linkToRelease = link; link = link->next_; - mapAllocator()->releaseLink( linkToRelease ); + mapAllocator()->releaseMapLink( linkToRelease ); } } - mapAllocator()->releaseBuckets( buckets_ ); + mapAllocator()->releaseMapBuckets( buckets_ ); } } @@ -164,7 +234,7 @@ ValueInternalMap::reserve( BucketIndex newItemCount ) { if ( !buckets_ && newItemCount > 0 ) { - buckets_ = mapAllocator()->allocateBuckets( 1 ); + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); bucketsSize_ = 1; tailLink_ = &buckets_[0]; } @@ -286,7 +356,7 @@ ValueInternalMap::doActualRemove( ValueInternalLink *link, ValueInternalLink *linkPreviousToLast = lastLink->previous_; if ( linkPreviousToLast != 0 ) // can not deleted bucket link. { - mapAllocator()->releaseLink( lastLink ); + mapAllocator()->releaseMapLink( lastLink ); linkPreviousToLast->next_ = 0; lastLink = linkPreviousToLast; } @@ -345,7 +415,7 @@ ValueInternalMap::unsafeAdd( const char *key, } if ( index == ValueInternalLink::itemPerLink ) // need to add a new page { - ValueInternalLink *newLink = mapAllocator()->allocateLink(); + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); index = 0; link->next_ = newLink; previousLink = newLink; diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 3b3b7bf..da575b8 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -6,6 +6,9 @@ # include #endif #include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR #define JSON_ASSERT_UNREACHABLE assert( false ) #define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw @@ -198,6 +201,10 @@ Value::CZString::isStaticString() const // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. 
+ */ Value::Value( ValueType type ) : type_( type ) , comments_( 0 ) @@ -227,10 +234,10 @@ Value::Value( ValueType type ) break; #else case arrayValue: - value_.array_ = new ValueInternalArray(); + value_.array_ = arrayAllocator()->newArray(); break; case objectValue: - value_.map_ = new ValueInternalMap(); + value_.map_ = mapAllocator()->newMap(); break; #endif case booleanValue: @@ -364,10 +371,10 @@ Value::Value( const Value &other ) break; #else case arrayValue: - value_.array_ = new ValueInternalArray( *other.value_.array_ ); + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); break; case objectValue: - value_.map_ = new ValueInternalMap( *other.value_.map_ ); + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); break; #endif default: @@ -407,10 +414,10 @@ Value::~Value() break; #else case arrayValue: - delete value_.array_; + arrayAllocator()->destructArray( value_.array_ ); break; case objectValue: - delete value_.map_; + mapAllocator()->destructMap( value_.map_ ); break; #endif default: diff --git a/src/lib_json/lib_json.vcproj b/src/lib_json/lib_json.vcproj index aa64706..d1a6f05 100644 --- a/src/lib_json/lib_json.vcproj +++ b/src/lib_json/lib_json.vcproj @@ -175,6 +175,9 @@ + + From 8075bca10710b2ca2775d8e7068e55786aa0a304 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 24 Jun 2006 14:53:58 +0000 Subject: [PATCH 021/268] - updated build instructions - integrated build instructions in html doc. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@21 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- README => README.txt | 2 +- doc/jsoncpp.dox | 14 +++++++++++--- doc/sconscript | 5 +++-- src/lib_json/json_internalarray.inl | 2 +- src/lib_json/json_internalmap.inl | 1 + 5 files changed, 17 insertions(+), 7 deletions(-) rename README => README.txt (93%) diff --git a/README b/README.txt similarity index 93% rename from README rename to README.txt index 528e7dd..65d3629 100644 --- a/README +++ b/README.txt @@ -32,7 +32,7 @@ where PLTFRM may be one of: msvc70 Microsoft Visual Studio 2002 msvc71 Microsoft Visual Studio 2003 msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux) + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) adding platform is fairly simple. You need to change the Sconstruct file to do so. diff --git a/doc/jsoncpp.dox b/doc/jsoncpp.dox index 70a71fe..5463463 100644 --- a/doc/jsoncpp.dox +++ b/doc/jsoncpp.dox @@ -22,7 +22,7 @@ Here is an example of JSON data: ], // Tab indent size - indent : 3 + indent : { length : 3, use_space = true } } \endverbatim @@ -51,20 +51,28 @@ const Json::Value plugins = root["plug-ins"]; for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. loadPlugIn( plugins[index].asString() ); -setIndent( root.get("indent", 3).asInt() ); +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); // ... // At application shutdown to make the new configuration document: // Since Json::Value has implicit constructor for all value types, it is not // necessary to explicitely construct the Json::Value object: root["encoding"] = getCurrentEncoding(); -root["indent"] = getCurrentIndent(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); Json::StyledWriter writer; // Make a new JSON document for the configuration. Preserve original comments. 
std::string outputConfig = writer.write( root ); \endcode +\section _plinks Build instructions +The build instruction are located in the file +README.txt in the top-directory of the project. + +Permanent link to the lastest revision of the file in subversion: +lastest README.txt \section _plinks Project links - json-cpp home diff --git a/doc/sconscript b/doc/sconscript index 3e4eebb..584835c 100644 --- a/doc/sconscript +++ b/doc/sconscript @@ -9,10 +9,11 @@ if 'doxygen' in env['TOOLS']: '%TOPDIR%' : env.Dir('#').abspath, '%DOC_TOPDIR%' : str(doc_topdir) } ) doc_cmd = env.Doxygen( doxyfile ) - env.Alias('doc', doc_cmd) - env.AlwaysBuild(doc_cmd) + alias_doc_cmd = env.Alias('doc', doc_cmd ) + env.AlwaysBuild(alias_doc_cmd) for dir in doc_cmd: + env.Alias('doc', env.Install( dir.path, '#README.txt' ) ) filename = os.path.split(dir.path)[1] targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], diff --git a/src/lib_json/json_internalarray.inl b/src/lib_json/json_internalarray.inl index 73bc9ad..2b3d859 100644 --- a/src/lib_json/json_internalarray.inl +++ b/src/lib_json/json_internalarray.inl @@ -72,7 +72,7 @@ public: // overridden from ValueArrayAllocator }; #else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - +/// @todo make this thread-safe (lock when accessign batch allocator) class DefaultValueArrayAllocator : public ValueArrayAllocator { public: // overridden from ValueArrayAllocator diff --git a/src/lib_json/json_internalmap.inl b/src/lib_json/json_internalmap.inl index 7809f58..9ff1d65 100644 --- a/src/lib_json/json_internalmap.inl +++ b/src/lib_json/json_internalmap.inl @@ -53,6 +53,7 @@ public: // overridden from ValueMapAllocator } }; #else +/// @todo make this thread-safe (lock when accessign batch allocator) class DefaultValueMapAllocator : public ValueMapAllocator { public: // overridden from ValueMapAllocator From c757bd512696bdf1cb3f8462b79484040bba4a32 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 24 Jun 2006 16:57:51 +0000 Subject: [PATCH 022/268] - added some documentation - added ValueAllocator to customize string duplication. 
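A short sketch of what a custom allocator built on the new interface could look like; the class name and the duplication counter are illustrative, not part of the patch, and the patch itself only installs its own malloc/free based DefaultValueAllocator (the valueAllocator() accessor it adds stays internal to json_value.cpp):

# include <stdlib.h>   // malloc, free
# include <string.h>   // strlen, memcpy
# include <json/value.h>

// Illustrative only: duplicates strings like the DefaultValueAllocator added by this
// patch, but also counts how many duplications were made. The null guard is added
// here for safety and is not present in the patch.
class CountingValueAllocator : public Json::ValueAllocator
{
public:
   CountingValueAllocator()
      : duplicationCount_( 0 )
   {
   }

   virtual char *makeMemberName( const char *memberName )
   {
      return duplicateStringValue( memberName );
   }

   virtual void releaseMemberName( char *memberName )
   {
      releaseStringValue( memberName );
   }

   virtual char *duplicateStringValue( const char *value,
                                       unsigned int length = unknown )
   {
      if ( !value )
         return 0;
      if ( length == unknown )
         length = (unsigned int)strlen( value );
      char *newString = static_cast<char *>( malloc( length + 1 ) );
      memcpy( newString, value, length );
      newString[length] = 0;
      ++duplicationCount_;
      return newString;
   }

   virtual void releaseStringValue( char *value )
   {
      if ( value )
         free( value );
   }

   unsigned int duplicationCount_;
};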
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@22 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- doc/doxyfile.in | 2 +- doc/sconscript | 3 +- include/json/config.h | 2 +- include/json/value.h | 74 +++++++++---------- src/lib_json/json_batchallocator.h | 4 + src/lib_json/json_internalmap.inl | 27 ++++++- src/lib_json/json_value.cpp | 115 +++++++++++++++++++++-------- 7 files changed, 155 insertions(+), 72 deletions(-) diff --git a/doc/doxyfile.in b/doc/doxyfile.in index 68589b9..15ec5bd 100644 --- a/doc/doxyfile.in +++ b/doc/doxyfile.in @@ -189,7 +189,7 @@ EXPAND_ONLY_PREDEF = NO SEARCH_INCLUDES = YES INCLUDE_PATH = ../include INCLUDE_FILE_PATTERNS = *.h -PREDEFINED = +PREDEFINED = JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP EXPAND_AS_DEFINED = SKIP_FUNCTION_MACROS = YES #--------------------------------------------------------------------------- diff --git a/doc/sconscript b/doc/sconscript index 584835c..d2e27a7 100644 --- a/doc/sconscript +++ b/doc/sconscript @@ -13,9 +13,10 @@ if 'doxygen' in env['TOOLS']: env.AlwaysBuild(alias_doc_cmd) for dir in doc_cmd: - env.Alias('doc', env.Install( dir.path, '#README.txt' ) ) + env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) ) filename = os.path.split(dir.path)[1] targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], TARGZ_BASEDIR = doc_topdir ) + env.Depends( zip_doc_cmd, alias_doc_cmd ) env.Alias( 'doc-dist', zip_doc_cmd ) diff --git a/include/json/config.h b/include/json/config.h index c1bcb3f..ca3f999 100644 --- a/include/json/config.h +++ b/include/json/config.h @@ -12,7 +12,7 @@ /// If defined, indicates that Json specific container should be used /// (hash table & simple deque container with customizable allocator). /// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 +# define JSON_VALUE_USE_INTERNAL_MAP 1 /// Force usage of standard new/malloc based allocator instead of memory pool based allocator. /// The memory pools allocator used optimization (initializing Value and ValueInternalLink /// as if it was a POD) that may cause some validation tool to report errors. diff --git a/include/json/value.h b/include/json/value.h index 447e7c2..5b5e460 100644 --- a/include/json/value.h +++ b/include/json/value.h @@ -14,6 +14,8 @@ # include # endif +/** \brief JSON (JavaScript Object Notation). + */ namespace Json { /** \brief Type of the value held by a Value object. @@ -43,9 +45,9 @@ namespace Json { // typedef CppTL::AnyEnumerator EnumValues; //# endif - /** \brief Lightweight wrapper to identify static string. + /** \brief Lightweight wrapper to tag static string. * - * Value constructor and objectValue member assignement take advantage of the + * Value constructor and objectValue member assignement takes advantage of the * StaticString and avoid the cost of string duplication when storing the * string or the member name. * @@ -127,6 +129,7 @@ namespace Json { static const UInt maxUInt; private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION # ifndef JSON_VALUE_USE_INTERNAL_MAP class CZString { @@ -160,6 +163,7 @@ namespace Json { typedef CppTL::SmallMap ObjectValues; # endif // ifndef JSON_USE_CPPTL_SMALLMAP # endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION public: Value( ValueType type = nullValue ); @@ -454,7 +458,13 @@ namespace Json { Args args_; }; - + /** \brief Allocator to customize member name and string value memory management done by Value. 
+ * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ class ValueAllocator { public: @@ -462,10 +472,11 @@ namespace Json { virtual ~ValueAllocator(); - virtual char *makeMemberName() = 0; - virtual void releaseMemberName( char * ) = 0; - virtual char *duplicateValue( const char *value, unsigned int length = unknown ) = 0; - virtual void releaseValue( char *value ) = 0; + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; }; #ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -526,7 +537,7 @@ namespace Json { virtual void releaseMapLink( ValueInternalLink *link ) = 0; }; - /** \brief Link of hash-map used to store arrayValue and objectValue. + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). * \internal previous_ & next_ allows for bidirectional traversal. */ class JSON_API ValueInternalLink @@ -538,28 +549,9 @@ namespace Json { flagUsed = 1 }; - /** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. - */ - ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) - { - } + ValueInternalLink(); - ~ValueInternalLink() - { - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } - } + ~ValueInternalLink(); Value items_[itemPerLink]; char *keys_[itemPerLink]; @@ -568,6 +560,18 @@ namespace Json { }; + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. + */ class JSON_API ValueInternalMap { friend class ValueIteratorBase; @@ -576,6 +580,7 @@ namespace Json { typedef unsigned int HashKey; typedef unsigned int BucketIndex; +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION struct IteratorState { ValueInternalMap *map_; @@ -583,14 +588,7 @@ namespace Json { BucketIndex itemIndex_; BucketIndex bucketIndex_; }; - - /* When the bucket is allocated, one page is immediately allocated for each bucket. - Each bucket is made up of a chained list of ValueInternalLink. The last - link of a given bucket can be found in the 'previous_' field of the following bucket. - The last link of the last bucket is stored in tailLink_ as it has no following bucket. - Only the last link of a bucket may contains 'available' item. 
The last link always - contains at least one element unless is it the bucket one very first link. - */ +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION ValueInternalMap(); ValueInternalMap( const ValueInternalMap &other ); @@ -674,12 +672,14 @@ namespace Json { typedef Value::ArrayIndex ArrayIndex; typedef unsigned int PageIndex; +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION struct IteratorState // Must be a POD { ValueInternalArray *array_; Value **currentPageIndex_; unsigned int currentItemIndex_; }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION ValueInternalArray(); ValueInternalArray( const ValueInternalArray &other ); diff --git a/src/lib_json/json_batchallocator.h b/src/lib_json/json_batchallocator.h index 5db9db4..6744506 100644 --- a/src/lib_json/json_batchallocator.h +++ b/src/lib_json/json_batchallocator.h @@ -4,6 +4,8 @@ # include # include +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + namespace Json { /* Fast memory allocator. @@ -117,5 +119,7 @@ class BatchAllocator } // namespace Json +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + #endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED diff --git a/src/lib_json/json_internalmap.inl b/src/lib_json/json_internalmap.inl index 9ff1d65..8e60b46 100644 --- a/src/lib_json/json_internalmap.inl +++ b/src/lib_json/json_internalmap.inl @@ -9,6 +9,31 @@ // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. + */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + ValueMapAllocator::~ValueMapAllocator() { } @@ -390,7 +415,7 @@ ValueInternalMap::setNewItem( const char *key, ValueInternalLink *link, BucketIndex index ) { - char *duplicatedKey = safeStringDup( key ); + char *duplicatedKey = valueAllocator()->makeMemberName( key ); ++itemCount_; link->keys_[index] = duplicatedKey; link->items_[index].setItemUsed(); diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index da575b8..1678117 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -21,40 +21,91 @@ const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); const Value::UInt Value::maxUInt = Value::UInt(-1); +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. 
+// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + ValueAllocator::~ValueAllocator() { } +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. - -inline char *safeStringDup( const char *czstring ) -{ - if ( czstring ) + virtual void releaseMemberName( char *memberName ) { - const size_t length = (unsigned int)( strlen(czstring) + 1 ); - char *newString = static_cast( malloc( length ) ); - memcpy( newString, czstring, length ); - return newString; + releaseStringValue( memberName ); } - return 0; -} -inline char *safeStringDup( const std::string &str ) -{ - if ( !str.empty() ) + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) { - const size_t length = str.length(); + if ( !value || value[0] == 0 ) + return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, str.c_str(), length ); + memcpy( newString, value, length ); newString[length] = 0; return newString; } - return 0; + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; } +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// @@ -88,7 +139,7 @@ Value::CommentInfo::CommentInfo() Value::CommentInfo::~CommentInfo() { if ( comment_ ) - free( comment_ ); + valueAllocator()->releaseStringValue( comment_ ); } @@ -96,8 +147,8 @@ void Value::CommentInfo::setComment( const char *text ) { if ( comment_ ) - free( comment_ ); - comment_ = safeStringDup( text ); + valueAllocator()->releaseStringValue( comment_ ); + comment_ = valueAllocator()->duplicateStringValue( text ); } @@ -120,14 +171,16 @@ Value::CZString::CZString( int index ) } Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? safeStringDup(cstr) : cstr ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) , index_( allocate ) { } Value::CZString::CZString( const CZString &other ) - : cstr_( other.index_ != noDuplication && other.cstr_ != 0 ? safeStringDup( other.cstr_ ) - : other.cstr_ ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? 
valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) , index_( other.cstr_ ? (other.index_ == noDuplication ? noDuplication : duplicate) : other.index_ ) { @@ -136,7 +189,7 @@ Value::CZString::CZString( const CZString &other ) Value::CZString::~CZString() { if ( cstr_ && index_ == duplicate ) - free( const_cast( cstr_ ) ); + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); } void @@ -288,7 +341,7 @@ Value::Value( const char *value ) , itemIsUsed_( 0 ) #endif { - value_.string_ = safeStringDup( value ); + value_.string_ = valueAllocator()->duplicateStringValue( value ); } Value::Value( const std::string &value ) @@ -299,7 +352,8 @@ Value::Value( const std::string &value ) , itemIsUsed_( 0 ) #endif { - value_.string_ = safeStringDup( value ); + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); } @@ -324,7 +378,7 @@ Value::Value( const CppTL::ConstString &value ) , itemIsUsed_( 0 ) #endif { - value_.string_ = safeStringDup( value ); + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); } # endif @@ -358,7 +412,7 @@ Value::Value( const Value &other ) case stringValue: if ( other.value_.string_ ) { - value_.string_ = safeStringDup( other.value_.string_ ); + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); allocated_ = true; } else @@ -405,7 +459,7 @@ Value::~Value() break; case stringValue: if ( allocated_ ) - free( value_.string_ ); + valueAllocator()->releaseStringValue( value_.string_ ); break; #ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: @@ -472,7 +526,6 @@ Value::compare( const Value &other ) case booleanValue: break; case stringValue, - free( value_.string_ ); break; case arrayValue: delete value_.array_; From f05e034d7581d187326a641bc0efcca7c5a2382c Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 26 Jun 2006 20:29:12 +0000 Subject: [PATCH 023/268] - added test to check empty member name { "" : 134 }. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@23 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- src/lib_json/json_value.cpp | 5 +++-- test/test_object_04.expected | 2 ++ test/test_object_04.json | 3 +++ 3 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 test/test_object_04.expected create mode 100644 test/test_object_04.json diff --git a/src/lib_json/json_value.cpp b/src/lib_json/json_value.cpp index 1678117..5a59956 100644 --- a/src/lib_json/json_value.cpp +++ b/src/lib_json/json_value.cpp @@ -73,8 +73,9 @@ class DefaultValueAllocator : public ValueAllocator virtual char *duplicateStringValue( const char *value, unsigned int length = unknown ) { - if ( !value || value[0] == 0 ) - return 0; + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; if ( length == unknown ) length = (unsigned int)strlen(value); diff --git a/test/test_object_04.expected b/test/test_object_04.expected new file mode 100644 index 0000000..cf4d7c3 --- /dev/null +++ b/test/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/test/test_object_04.json b/test/test_object_04.json new file mode 100644 index 0000000..f1e364a --- /dev/null +++ b/test/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} From 96c93fd276fea602784e9ce624629dde9f92c2a2 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 15 Jul 2006 18:40:16 +0000 Subject: [PATCH 024/268] - removed some warnings when compiling with gcc 4.1. 
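Beyond the warning fixes, the json_reader.cpp hunk below corrects a real defect in Reader::decodeUnicodeEscapeSequence: the old test c >=0 && c <= 9 compared the character against the integers 0..9 rather than the characters '0'..'9', so decimal digits in \u escape sequences were not recognized. A standalone sketch of the corrected hex-digit decoding (the helper name is illustrative):

// Illustrative helper mirroring the corrected test; returns -1 for a non-hex character.
static int hexDigitValue( char c )
{
   if ( c >= '0' && c <= '9' )
      return c - '0';
   if ( c >= 'a' && c <= 'f' )
      return c - 'a' + 10;
   if ( c >= 'A' && c <= 'F' )
      return c - 'A' + 10;
   return -1;
}

// For example, the escape \u00A9 accumulates its four digits as
// ((0 * 16 + 0) * 16 + 10) * 16 + 9 == 0x00A9.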
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@24 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- include/json/config.h | 2 +- src/lib_json/json_batchallocator.h | 6 +++--- src/lib_json/json_reader.cpp | 2 +- src/lib_json/json_writer.cpp | 1 - 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/include/json/config.h b/include/json/config.h index ca3f999..c1bcb3f 100644 --- a/include/json/config.h +++ b/include/json/config.h @@ -12,7 +12,7 @@ /// If defined, indicates that Json specific container should be used /// (hash table & simple deque container with customizable allocator). /// THIS FEATURE IS STILL EXPERIMENTAL! -# define JSON_VALUE_USE_INTERNAL_MAP 1 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 /// Force usage of standard new/malloc based allocator instead of memory pool based allocator. /// The memory pools allocator used optimization (initializing Value and ValueInternalLink /// as if it was a POD) that may cause some validation tool to report errors. diff --git a/src/lib_json/json_batchallocator.h b/src/lib_json/json_batchallocator.h index 6744506..1e35c19 100644 --- a/src/lib_json/json_batchallocator.h +++ b/src/lib_json/json_batchallocator.h @@ -25,11 +25,11 @@ template %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. diff --git a/src/lib_json/json_reader.cpp b/src/lib_json/json_reader.cpp index cfdcfca..9d0e665 100644 --- a/src/lib_json/json_reader.cpp +++ b/src/lib_json/json_reader.cpp @@ -583,7 +583,7 @@ Reader::decodeUnicodeEscapeSequence( Token &token, { Char c = *current++; unicode *= 16; - if ( c >=0 && c <= 9 ) + if ( c >= '0' && c <= '9' ) unicode += c - '0'; else if ( c >= 'a' && c <= 'f' ) unicode += c - 'a' + 10; diff --git a/src/lib_json/json_writer.cpp b/src/lib_json/json_writer.cpp index f47e3f9..f1d8168 100644 --- a/src/lib_json/json_writer.cpp +++ b/src/lib_json/json_writer.cpp @@ -13,7 +13,6 @@ static void uintToString( unsigned int value, char *¤t ) { *--current = 0; - char *end = current; do { *--current = (value % 10) + '0'; From 93203670a8f2af60c825806a32ead731e18ec5d3 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 15 Jul 2006 18:42:11 +0000 Subject: [PATCH 025/268] - ignore VTune paths. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@25 1f120ed1-78a5-a849-adca-83f0a9e25bb6 From fef92a3f8b2a27e916783b9125170a6cd9410b9f Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 1 Sep 2006 19:42:11 +0000 Subject: [PATCH 026/268] - fixed dist target build. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@26 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- SConstruct | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SConstruct b/SConstruct index e9c507e..d73b629 100644 --- a/SConstruct +++ b/SConstruct @@ -162,7 +162,7 @@ RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) env.Alias( 'check' ) srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README SConstruct + AUTHORS README.txt SConstruct """.split() ) env.Alias( 'src-dist', srcdist_cmd ) From 904a9f9260c18ae528baa95e251ab62813c695e6 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 15 Mar 2007 22:04:34 +0000 Subject: [PATCH 027/268] - reorganizing repository to match standard layout git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@27 1f120ed1-78a5-a849-adca-83f0a9e25bb6 From c272de48952ad246232fb11de02d2c737c775cda Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 15 Mar 2007 22:11:38 +0000 Subject: [PATCH 028/268] - reorganized repository to match standard layout git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@28 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- AUTHORS => trunk/jsoncpp/AUTHORS | 0 README.txt => trunk/jsoncpp/README.txt | 0 SConstruct => trunk/jsoncpp/SConstruct | 0 {doc => trunk/jsoncpp/doc}/doxyfile.in | 0 {doc => trunk/jsoncpp/doc}/footer.html | 0 {doc => trunk/jsoncpp/doc}/header.html | 0 {doc => trunk/jsoncpp/doc}/jsoncpp.dox | 0 {doc => trunk/jsoncpp/doc}/readme.txt | 0 {doc => trunk/jsoncpp/doc}/sconscript | 0 {include => trunk/jsoncpp/include}/json/autolink.h | 0 {include => trunk/jsoncpp/include}/json/config.h | 0 {include => trunk/jsoncpp/include}/json/forwards.h | 0 {include => trunk/jsoncpp/include}/json/json.h | 0 {include => trunk/jsoncpp/include}/json/reader.h | 0 {include => trunk/jsoncpp/include}/json/value.h | 0 {include => trunk/jsoncpp/include}/json/writer.h | 0 {makefiles => trunk/jsoncpp/makefiles}/vs71/jsoncpp.sln | 0 {scons-tools => trunk/jsoncpp/scons-tools}/doxygen.py | 0 {scons-tools => trunk/jsoncpp/scons-tools}/srcdist.py | 0 {scons-tools => trunk/jsoncpp/scons-tools}/substinfile.py | 0 {scons-tools => trunk/jsoncpp/scons-tools}/targz.py | 0 {src => trunk/jsoncpp/src}/jsontestrunner/jsontest.vcproj | 0 {src => trunk/jsoncpp/src}/jsontestrunner/main.cpp | 0 {src => trunk/jsoncpp/src}/jsontestrunner/sconscript | 0 {src => trunk/jsoncpp/src}/lib_json/json_batchallocator.h | 0 {src => trunk/jsoncpp/src}/lib_json/json_internalarray.inl | 0 {src => trunk/jsoncpp/src}/lib_json/json_internalmap.inl | 0 {src => trunk/jsoncpp/src}/lib_json/json_reader.cpp | 0 {src => trunk/jsoncpp/src}/lib_json/json_value.cpp | 0 {src => trunk/jsoncpp/src}/lib_json/json_valueiterator.inl | 0 {src => trunk/jsoncpp/src}/lib_json/json_writer.cpp | 0 {src => trunk/jsoncpp/src}/lib_json/lib_json.vcproj | 0 {src => trunk/jsoncpp/src}/lib_json/sconscript | 0 {test => trunk/jsoncpp/test}/cleantests.py | 0 {test => trunk/jsoncpp/test}/generate_expected.py | 0 {test => trunk/jsoncpp/test}/jsontestrunner.py | 0 {test => trunk/jsoncpp/test}/runjsontests.py | 0 {test => trunk/jsoncpp/test}/test_array_01.expected | 0 {test => trunk/jsoncpp/test}/test_array_01.json | 0 {test => trunk/jsoncpp/test}/test_array_02.expected | 0 {test => trunk/jsoncpp/test}/test_array_02.json | 0 {test => trunk/jsoncpp/test}/test_array_03.expected | 0 {test => trunk/jsoncpp/test}/test_array_03.json | 0 {test => trunk/jsoncpp/test}/test_array_04.expected | 0 {test => trunk/jsoncpp/test}/test_array_04.json | 0 {test => trunk/jsoncpp/test}/test_array_05.expected | 0 {test => 
trunk/jsoncpp/test}/test_array_05.json | 0 {test => trunk/jsoncpp/test}/test_array_06.expected | 0 {test => trunk/jsoncpp/test}/test_array_06.json | 0 {test => trunk/jsoncpp/test}/test_basic_01.expected | 0 {test => trunk/jsoncpp/test}/test_basic_01.json | 0 {test => trunk/jsoncpp/test}/test_basic_02.expected | 0 {test => trunk/jsoncpp/test}/test_basic_02.json | 0 {test => trunk/jsoncpp/test}/test_basic_03.expected | 0 {test => trunk/jsoncpp/test}/test_basic_03.json | 0 {test => trunk/jsoncpp/test}/test_basic_04.expected | 0 {test => trunk/jsoncpp/test}/test_basic_04.json | 0 {test => trunk/jsoncpp/test}/test_basic_05.expected | 0 {test => trunk/jsoncpp/test}/test_basic_05.json | 0 {test => trunk/jsoncpp/test}/test_basic_06.expected | 0 {test => trunk/jsoncpp/test}/test_basic_06.json | 0 {test => trunk/jsoncpp/test}/test_basic_07.expected | 0 {test => trunk/jsoncpp/test}/test_basic_07.json | 0 {test => trunk/jsoncpp/test}/test_basic_08.expected | 0 {test => trunk/jsoncpp/test}/test_basic_08.json | 0 {test => trunk/jsoncpp/test}/test_basic_09.expected | 0 {test => trunk/jsoncpp/test}/test_basic_09.json | 0 {test => trunk/jsoncpp/test}/test_complex_01.expected | 0 {test => trunk/jsoncpp/test}/test_complex_01.json | 0 {test => trunk/jsoncpp/test}/test_integer_01.expected | 0 {test => trunk/jsoncpp/test}/test_integer_01.json | 0 {test => trunk/jsoncpp/test}/test_integer_02.expected | 0 {test => trunk/jsoncpp/test}/test_integer_02.json | 0 {test => trunk/jsoncpp/test}/test_integer_03.expected | 0 {test => trunk/jsoncpp/test}/test_integer_03.json | 0 {test => trunk/jsoncpp/test}/test_integer_04.expected | 0 {test => trunk/jsoncpp/test}/test_integer_04.json | 0 {test => trunk/jsoncpp/test}/test_integer_05.expected | 0 {test => trunk/jsoncpp/test}/test_integer_05.json | 0 {test => trunk/jsoncpp/test}/test_object_01.expected | 0 {test => trunk/jsoncpp/test}/test_object_01.json | 0 {test => trunk/jsoncpp/test}/test_object_02.expected | 0 {test => trunk/jsoncpp/test}/test_object_02.json | 0 {test => trunk/jsoncpp/test}/test_object_03.expected | 0 {test => trunk/jsoncpp/test}/test_object_03.json | 0 {test => trunk/jsoncpp/test}/test_object_04.expected | 0 {test => trunk/jsoncpp/test}/test_object_04.json | 0 {test => trunk/jsoncpp/test}/test_preserve_comment_01.expected | 0 {test => trunk/jsoncpp/test}/test_preserve_comment_01.json | 0 {test => trunk/jsoncpp/test}/test_real_01.expected | 0 {test => trunk/jsoncpp/test}/test_real_01.json | 0 {test => trunk/jsoncpp/test}/test_real_02.expected | 0 {test => trunk/jsoncpp/test}/test_real_02.json | 0 {test => trunk/jsoncpp/test}/test_real_03.expected | 0 {test => trunk/jsoncpp/test}/test_real_03.json | 0 {test => trunk/jsoncpp/test}/test_real_04.expected | 0 {test => trunk/jsoncpp/test}/test_real_04.json | 0 {test => trunk/jsoncpp/test}/test_real_05.expected | 0 {test => trunk/jsoncpp/test}/test_real_05.json | 0 {test => trunk/jsoncpp/test}/test_real_06.expected | 0 {test => trunk/jsoncpp/test}/test_real_06.json | 0 {test => trunk/jsoncpp/test}/test_real_07.expected | 0 {test => trunk/jsoncpp/test}/test_real_07.json | 0 103 files changed, 0 insertions(+), 0 deletions(-) rename AUTHORS => trunk/jsoncpp/AUTHORS (100%) rename README.txt => trunk/jsoncpp/README.txt (100%) rename SConstruct => trunk/jsoncpp/SConstruct (100%) rename {doc => trunk/jsoncpp/doc}/doxyfile.in (100%) rename {doc => trunk/jsoncpp/doc}/footer.html (100%) rename {doc => trunk/jsoncpp/doc}/header.html (100%) rename {doc => trunk/jsoncpp/doc}/jsoncpp.dox (100%) rename {doc => 
trunk/jsoncpp/doc}/readme.txt (100%) rename {doc => trunk/jsoncpp/doc}/sconscript (100%) rename {include => trunk/jsoncpp/include}/json/autolink.h (100%) rename {include => trunk/jsoncpp/include}/json/config.h (100%) rename {include => trunk/jsoncpp/include}/json/forwards.h (100%) rename {include => trunk/jsoncpp/include}/json/json.h (100%) rename {include => trunk/jsoncpp/include}/json/reader.h (100%) rename {include => trunk/jsoncpp/include}/json/value.h (100%) rename {include => trunk/jsoncpp/include}/json/writer.h (100%) rename {makefiles => trunk/jsoncpp/makefiles}/vs71/jsoncpp.sln (100%) rename {scons-tools => trunk/jsoncpp/scons-tools}/doxygen.py (100%) rename {scons-tools => trunk/jsoncpp/scons-tools}/srcdist.py (100%) rename {scons-tools => trunk/jsoncpp/scons-tools}/substinfile.py (100%) rename {scons-tools => trunk/jsoncpp/scons-tools}/targz.py (100%) rename {src => trunk/jsoncpp/src}/jsontestrunner/jsontest.vcproj (100%) rename {src => trunk/jsoncpp/src}/jsontestrunner/main.cpp (100%) rename {src => trunk/jsoncpp/src}/jsontestrunner/sconscript (100%) rename {src => trunk/jsoncpp/src}/lib_json/json_batchallocator.h (100%) rename {src => trunk/jsoncpp/src}/lib_json/json_internalarray.inl (100%) rename {src => trunk/jsoncpp/src}/lib_json/json_internalmap.inl (100%) rename {src => trunk/jsoncpp/src}/lib_json/json_reader.cpp (100%) rename {src => trunk/jsoncpp/src}/lib_json/json_value.cpp (100%) rename {src => trunk/jsoncpp/src}/lib_json/json_valueiterator.inl (100%) rename {src => trunk/jsoncpp/src}/lib_json/json_writer.cpp (100%) rename {src => trunk/jsoncpp/src}/lib_json/lib_json.vcproj (100%) rename {src => trunk/jsoncpp/src}/lib_json/sconscript (100%) rename {test => trunk/jsoncpp/test}/cleantests.py (100%) rename {test => trunk/jsoncpp/test}/generate_expected.py (100%) rename {test => trunk/jsoncpp/test}/jsontestrunner.py (100%) rename {test => trunk/jsoncpp/test}/runjsontests.py (100%) rename {test => trunk/jsoncpp/test}/test_array_01.expected (100%) rename {test => trunk/jsoncpp/test}/test_array_01.json (100%) rename {test => trunk/jsoncpp/test}/test_array_02.expected (100%) rename {test => trunk/jsoncpp/test}/test_array_02.json (100%) rename {test => trunk/jsoncpp/test}/test_array_03.expected (100%) rename {test => trunk/jsoncpp/test}/test_array_03.json (100%) rename {test => trunk/jsoncpp/test}/test_array_04.expected (100%) rename {test => trunk/jsoncpp/test}/test_array_04.json (100%) rename {test => trunk/jsoncpp/test}/test_array_05.expected (100%) rename {test => trunk/jsoncpp/test}/test_array_05.json (100%) rename {test => trunk/jsoncpp/test}/test_array_06.expected (100%) rename {test => trunk/jsoncpp/test}/test_array_06.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_01.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_01.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_02.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_02.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_03.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_03.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_04.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_04.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_05.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_05.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_06.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_06.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_07.expected (100%) rename {test => 
trunk/jsoncpp/test}/test_basic_07.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_08.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_08.json (100%) rename {test => trunk/jsoncpp/test}/test_basic_09.expected (100%) rename {test => trunk/jsoncpp/test}/test_basic_09.json (100%) rename {test => trunk/jsoncpp/test}/test_complex_01.expected (100%) rename {test => trunk/jsoncpp/test}/test_complex_01.json (100%) rename {test => trunk/jsoncpp/test}/test_integer_01.expected (100%) rename {test => trunk/jsoncpp/test}/test_integer_01.json (100%) rename {test => trunk/jsoncpp/test}/test_integer_02.expected (100%) rename {test => trunk/jsoncpp/test}/test_integer_02.json (100%) rename {test => trunk/jsoncpp/test}/test_integer_03.expected (100%) rename {test => trunk/jsoncpp/test}/test_integer_03.json (100%) rename {test => trunk/jsoncpp/test}/test_integer_04.expected (100%) rename {test => trunk/jsoncpp/test}/test_integer_04.json (100%) rename {test => trunk/jsoncpp/test}/test_integer_05.expected (100%) rename {test => trunk/jsoncpp/test}/test_integer_05.json (100%) rename {test => trunk/jsoncpp/test}/test_object_01.expected (100%) rename {test => trunk/jsoncpp/test}/test_object_01.json (100%) rename {test => trunk/jsoncpp/test}/test_object_02.expected (100%) rename {test => trunk/jsoncpp/test}/test_object_02.json (100%) rename {test => trunk/jsoncpp/test}/test_object_03.expected (100%) rename {test => trunk/jsoncpp/test}/test_object_03.json (100%) rename {test => trunk/jsoncpp/test}/test_object_04.expected (100%) rename {test => trunk/jsoncpp/test}/test_object_04.json (100%) rename {test => trunk/jsoncpp/test}/test_preserve_comment_01.expected (100%) rename {test => trunk/jsoncpp/test}/test_preserve_comment_01.json (100%) rename {test => trunk/jsoncpp/test}/test_real_01.expected (100%) rename {test => trunk/jsoncpp/test}/test_real_01.json (100%) rename {test => trunk/jsoncpp/test}/test_real_02.expected (100%) rename {test => trunk/jsoncpp/test}/test_real_02.json (100%) rename {test => trunk/jsoncpp/test}/test_real_03.expected (100%) rename {test => trunk/jsoncpp/test}/test_real_03.json (100%) rename {test => trunk/jsoncpp/test}/test_real_04.expected (100%) rename {test => trunk/jsoncpp/test}/test_real_04.json (100%) rename {test => trunk/jsoncpp/test}/test_real_05.expected (100%) rename {test => trunk/jsoncpp/test}/test_real_05.json (100%) rename {test => trunk/jsoncpp/test}/test_real_06.expected (100%) rename {test => trunk/jsoncpp/test}/test_real_06.json (100%) rename {test => trunk/jsoncpp/test}/test_real_07.expected (100%) rename {test => trunk/jsoncpp/test}/test_real_07.json (100%) diff --git a/AUTHORS b/trunk/jsoncpp/AUTHORS similarity index 100% rename from AUTHORS rename to trunk/jsoncpp/AUTHORS diff --git a/README.txt b/trunk/jsoncpp/README.txt similarity index 100% rename from README.txt rename to trunk/jsoncpp/README.txt diff --git a/SConstruct b/trunk/jsoncpp/SConstruct similarity index 100% rename from SConstruct rename to trunk/jsoncpp/SConstruct diff --git a/doc/doxyfile.in b/trunk/jsoncpp/doc/doxyfile.in similarity index 100% rename from doc/doxyfile.in rename to trunk/jsoncpp/doc/doxyfile.in diff --git a/doc/footer.html b/trunk/jsoncpp/doc/footer.html similarity index 100% rename from doc/footer.html rename to trunk/jsoncpp/doc/footer.html diff --git a/doc/header.html b/trunk/jsoncpp/doc/header.html similarity index 100% rename from doc/header.html rename to trunk/jsoncpp/doc/header.html diff --git a/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox similarity 
index 100% rename from doc/jsoncpp.dox rename to trunk/jsoncpp/doc/jsoncpp.dox diff --git a/doc/readme.txt b/trunk/jsoncpp/doc/readme.txt similarity index 100% rename from doc/readme.txt rename to trunk/jsoncpp/doc/readme.txt diff --git a/doc/sconscript b/trunk/jsoncpp/doc/sconscript similarity index 100% rename from doc/sconscript rename to trunk/jsoncpp/doc/sconscript diff --git a/include/json/autolink.h b/trunk/jsoncpp/include/json/autolink.h similarity index 100% rename from include/json/autolink.h rename to trunk/jsoncpp/include/json/autolink.h diff --git a/include/json/config.h b/trunk/jsoncpp/include/json/config.h similarity index 100% rename from include/json/config.h rename to trunk/jsoncpp/include/json/config.h diff --git a/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h similarity index 100% rename from include/json/forwards.h rename to trunk/jsoncpp/include/json/forwards.h diff --git a/include/json/json.h b/trunk/jsoncpp/include/json/json.h similarity index 100% rename from include/json/json.h rename to trunk/jsoncpp/include/json/json.h diff --git a/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h similarity index 100% rename from include/json/reader.h rename to trunk/jsoncpp/include/json/reader.h diff --git a/include/json/value.h b/trunk/jsoncpp/include/json/value.h similarity index 100% rename from include/json/value.h rename to trunk/jsoncpp/include/json/value.h diff --git a/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h similarity index 100% rename from include/json/writer.h rename to trunk/jsoncpp/include/json/writer.h diff --git a/makefiles/vs71/jsoncpp.sln b/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln similarity index 100% rename from makefiles/vs71/jsoncpp.sln rename to trunk/jsoncpp/makefiles/vs71/jsoncpp.sln diff --git a/scons-tools/doxygen.py b/trunk/jsoncpp/scons-tools/doxygen.py similarity index 100% rename from scons-tools/doxygen.py rename to trunk/jsoncpp/scons-tools/doxygen.py diff --git a/scons-tools/srcdist.py b/trunk/jsoncpp/scons-tools/srcdist.py similarity index 100% rename from scons-tools/srcdist.py rename to trunk/jsoncpp/scons-tools/srcdist.py diff --git a/scons-tools/substinfile.py b/trunk/jsoncpp/scons-tools/substinfile.py similarity index 100% rename from scons-tools/substinfile.py rename to trunk/jsoncpp/scons-tools/substinfile.py diff --git a/scons-tools/targz.py b/trunk/jsoncpp/scons-tools/targz.py similarity index 100% rename from scons-tools/targz.py rename to trunk/jsoncpp/scons-tools/targz.py diff --git a/src/jsontestrunner/jsontest.vcproj b/trunk/jsoncpp/src/jsontestrunner/jsontest.vcproj similarity index 100% rename from src/jsontestrunner/jsontest.vcproj rename to trunk/jsoncpp/src/jsontestrunner/jsontest.vcproj diff --git a/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp similarity index 100% rename from src/jsontestrunner/main.cpp rename to trunk/jsoncpp/src/jsontestrunner/main.cpp diff --git a/src/jsontestrunner/sconscript b/trunk/jsoncpp/src/jsontestrunner/sconscript similarity index 100% rename from src/jsontestrunner/sconscript rename to trunk/jsoncpp/src/jsontestrunner/sconscript diff --git a/src/lib_json/json_batchallocator.h b/trunk/jsoncpp/src/lib_json/json_batchallocator.h similarity index 100% rename from src/lib_json/json_batchallocator.h rename to trunk/jsoncpp/src/lib_json/json_batchallocator.h diff --git a/src/lib_json/json_internalarray.inl b/trunk/jsoncpp/src/lib_json/json_internalarray.inl similarity index 100% rename from src/lib_json/json_internalarray.inl 
rename to trunk/jsoncpp/src/lib_json/json_internalarray.inl diff --git a/src/lib_json/json_internalmap.inl b/trunk/jsoncpp/src/lib_json/json_internalmap.inl similarity index 100% rename from src/lib_json/json_internalmap.inl rename to trunk/jsoncpp/src/lib_json/json_internalmap.inl diff --git a/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp similarity index 100% rename from src/lib_json/json_reader.cpp rename to trunk/jsoncpp/src/lib_json/json_reader.cpp diff --git a/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp similarity index 100% rename from src/lib_json/json_value.cpp rename to trunk/jsoncpp/src/lib_json/json_value.cpp diff --git a/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl similarity index 100% rename from src/lib_json/json_valueiterator.inl rename to trunk/jsoncpp/src/lib_json/json_valueiterator.inl diff --git a/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp similarity index 100% rename from src/lib_json/json_writer.cpp rename to trunk/jsoncpp/src/lib_json/json_writer.cpp diff --git a/src/lib_json/lib_json.vcproj b/trunk/jsoncpp/src/lib_json/lib_json.vcproj similarity index 100% rename from src/lib_json/lib_json.vcproj rename to trunk/jsoncpp/src/lib_json/lib_json.vcproj diff --git a/src/lib_json/sconscript b/trunk/jsoncpp/src/lib_json/sconscript similarity index 100% rename from src/lib_json/sconscript rename to trunk/jsoncpp/src/lib_json/sconscript diff --git a/test/cleantests.py b/trunk/jsoncpp/test/cleantests.py similarity index 100% rename from test/cleantests.py rename to trunk/jsoncpp/test/cleantests.py diff --git a/test/generate_expected.py b/trunk/jsoncpp/test/generate_expected.py similarity index 100% rename from test/generate_expected.py rename to trunk/jsoncpp/test/generate_expected.py diff --git a/test/jsontestrunner.py b/trunk/jsoncpp/test/jsontestrunner.py similarity index 100% rename from test/jsontestrunner.py rename to trunk/jsoncpp/test/jsontestrunner.py diff --git a/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py similarity index 100% rename from test/runjsontests.py rename to trunk/jsoncpp/test/runjsontests.py diff --git a/test/test_array_01.expected b/trunk/jsoncpp/test/test_array_01.expected similarity index 100% rename from test/test_array_01.expected rename to trunk/jsoncpp/test/test_array_01.expected diff --git a/test/test_array_01.json b/trunk/jsoncpp/test/test_array_01.json similarity index 100% rename from test/test_array_01.json rename to trunk/jsoncpp/test/test_array_01.json diff --git a/test/test_array_02.expected b/trunk/jsoncpp/test/test_array_02.expected similarity index 100% rename from test/test_array_02.expected rename to trunk/jsoncpp/test/test_array_02.expected diff --git a/test/test_array_02.json b/trunk/jsoncpp/test/test_array_02.json similarity index 100% rename from test/test_array_02.json rename to trunk/jsoncpp/test/test_array_02.json diff --git a/test/test_array_03.expected b/trunk/jsoncpp/test/test_array_03.expected similarity index 100% rename from test/test_array_03.expected rename to trunk/jsoncpp/test/test_array_03.expected diff --git a/test/test_array_03.json b/trunk/jsoncpp/test/test_array_03.json similarity index 100% rename from test/test_array_03.json rename to trunk/jsoncpp/test/test_array_03.json diff --git a/test/test_array_04.expected b/trunk/jsoncpp/test/test_array_04.expected similarity index 100% rename from test/test_array_04.expected rename to trunk/jsoncpp/test/test_array_04.expected 
diff --git a/test/test_array_04.json b/trunk/jsoncpp/test/test_array_04.json similarity index 100% rename from test/test_array_04.json rename to trunk/jsoncpp/test/test_array_04.json diff --git a/test/test_array_05.expected b/trunk/jsoncpp/test/test_array_05.expected similarity index 100% rename from test/test_array_05.expected rename to trunk/jsoncpp/test/test_array_05.expected diff --git a/test/test_array_05.json b/trunk/jsoncpp/test/test_array_05.json similarity index 100% rename from test/test_array_05.json rename to trunk/jsoncpp/test/test_array_05.json diff --git a/test/test_array_06.expected b/trunk/jsoncpp/test/test_array_06.expected similarity index 100% rename from test/test_array_06.expected rename to trunk/jsoncpp/test/test_array_06.expected diff --git a/test/test_array_06.json b/trunk/jsoncpp/test/test_array_06.json similarity index 100% rename from test/test_array_06.json rename to trunk/jsoncpp/test/test_array_06.json diff --git a/test/test_basic_01.expected b/trunk/jsoncpp/test/test_basic_01.expected similarity index 100% rename from test/test_basic_01.expected rename to trunk/jsoncpp/test/test_basic_01.expected diff --git a/test/test_basic_01.json b/trunk/jsoncpp/test/test_basic_01.json similarity index 100% rename from test/test_basic_01.json rename to trunk/jsoncpp/test/test_basic_01.json diff --git a/test/test_basic_02.expected b/trunk/jsoncpp/test/test_basic_02.expected similarity index 100% rename from test/test_basic_02.expected rename to trunk/jsoncpp/test/test_basic_02.expected diff --git a/test/test_basic_02.json b/trunk/jsoncpp/test/test_basic_02.json similarity index 100% rename from test/test_basic_02.json rename to trunk/jsoncpp/test/test_basic_02.json diff --git a/test/test_basic_03.expected b/trunk/jsoncpp/test/test_basic_03.expected similarity index 100% rename from test/test_basic_03.expected rename to trunk/jsoncpp/test/test_basic_03.expected diff --git a/test/test_basic_03.json b/trunk/jsoncpp/test/test_basic_03.json similarity index 100% rename from test/test_basic_03.json rename to trunk/jsoncpp/test/test_basic_03.json diff --git a/test/test_basic_04.expected b/trunk/jsoncpp/test/test_basic_04.expected similarity index 100% rename from test/test_basic_04.expected rename to trunk/jsoncpp/test/test_basic_04.expected diff --git a/test/test_basic_04.json b/trunk/jsoncpp/test/test_basic_04.json similarity index 100% rename from test/test_basic_04.json rename to trunk/jsoncpp/test/test_basic_04.json diff --git a/test/test_basic_05.expected b/trunk/jsoncpp/test/test_basic_05.expected similarity index 100% rename from test/test_basic_05.expected rename to trunk/jsoncpp/test/test_basic_05.expected diff --git a/test/test_basic_05.json b/trunk/jsoncpp/test/test_basic_05.json similarity index 100% rename from test/test_basic_05.json rename to trunk/jsoncpp/test/test_basic_05.json diff --git a/test/test_basic_06.expected b/trunk/jsoncpp/test/test_basic_06.expected similarity index 100% rename from test/test_basic_06.expected rename to trunk/jsoncpp/test/test_basic_06.expected diff --git a/test/test_basic_06.json b/trunk/jsoncpp/test/test_basic_06.json similarity index 100% rename from test/test_basic_06.json rename to trunk/jsoncpp/test/test_basic_06.json diff --git a/test/test_basic_07.expected b/trunk/jsoncpp/test/test_basic_07.expected similarity index 100% rename from test/test_basic_07.expected rename to trunk/jsoncpp/test/test_basic_07.expected diff --git a/test/test_basic_07.json b/trunk/jsoncpp/test/test_basic_07.json similarity index 100% rename from 
test/test_basic_07.json rename to trunk/jsoncpp/test/test_basic_07.json diff --git a/test/test_basic_08.expected b/trunk/jsoncpp/test/test_basic_08.expected similarity index 100% rename from test/test_basic_08.expected rename to trunk/jsoncpp/test/test_basic_08.expected diff --git a/test/test_basic_08.json b/trunk/jsoncpp/test/test_basic_08.json similarity index 100% rename from test/test_basic_08.json rename to trunk/jsoncpp/test/test_basic_08.json diff --git a/test/test_basic_09.expected b/trunk/jsoncpp/test/test_basic_09.expected similarity index 100% rename from test/test_basic_09.expected rename to trunk/jsoncpp/test/test_basic_09.expected diff --git a/test/test_basic_09.json b/trunk/jsoncpp/test/test_basic_09.json similarity index 100% rename from test/test_basic_09.json rename to trunk/jsoncpp/test/test_basic_09.json diff --git a/test/test_complex_01.expected b/trunk/jsoncpp/test/test_complex_01.expected similarity index 100% rename from test/test_complex_01.expected rename to trunk/jsoncpp/test/test_complex_01.expected diff --git a/test/test_complex_01.json b/trunk/jsoncpp/test/test_complex_01.json similarity index 100% rename from test/test_complex_01.json rename to trunk/jsoncpp/test/test_complex_01.json diff --git a/test/test_integer_01.expected b/trunk/jsoncpp/test/test_integer_01.expected similarity index 100% rename from test/test_integer_01.expected rename to trunk/jsoncpp/test/test_integer_01.expected diff --git a/test/test_integer_01.json b/trunk/jsoncpp/test/test_integer_01.json similarity index 100% rename from test/test_integer_01.json rename to trunk/jsoncpp/test/test_integer_01.json diff --git a/test/test_integer_02.expected b/trunk/jsoncpp/test/test_integer_02.expected similarity index 100% rename from test/test_integer_02.expected rename to trunk/jsoncpp/test/test_integer_02.expected diff --git a/test/test_integer_02.json b/trunk/jsoncpp/test/test_integer_02.json similarity index 100% rename from test/test_integer_02.json rename to trunk/jsoncpp/test/test_integer_02.json diff --git a/test/test_integer_03.expected b/trunk/jsoncpp/test/test_integer_03.expected similarity index 100% rename from test/test_integer_03.expected rename to trunk/jsoncpp/test/test_integer_03.expected diff --git a/test/test_integer_03.json b/trunk/jsoncpp/test/test_integer_03.json similarity index 100% rename from test/test_integer_03.json rename to trunk/jsoncpp/test/test_integer_03.json diff --git a/test/test_integer_04.expected b/trunk/jsoncpp/test/test_integer_04.expected similarity index 100% rename from test/test_integer_04.expected rename to trunk/jsoncpp/test/test_integer_04.expected diff --git a/test/test_integer_04.json b/trunk/jsoncpp/test/test_integer_04.json similarity index 100% rename from test/test_integer_04.json rename to trunk/jsoncpp/test/test_integer_04.json diff --git a/test/test_integer_05.expected b/trunk/jsoncpp/test/test_integer_05.expected similarity index 100% rename from test/test_integer_05.expected rename to trunk/jsoncpp/test/test_integer_05.expected diff --git a/test/test_integer_05.json b/trunk/jsoncpp/test/test_integer_05.json similarity index 100% rename from test/test_integer_05.json rename to trunk/jsoncpp/test/test_integer_05.json diff --git a/test/test_object_01.expected b/trunk/jsoncpp/test/test_object_01.expected similarity index 100% rename from test/test_object_01.expected rename to trunk/jsoncpp/test/test_object_01.expected diff --git a/test/test_object_01.json b/trunk/jsoncpp/test/test_object_01.json similarity index 100% rename from 
test/test_object_01.json rename to trunk/jsoncpp/test/test_object_01.json diff --git a/test/test_object_02.expected b/trunk/jsoncpp/test/test_object_02.expected similarity index 100% rename from test/test_object_02.expected rename to trunk/jsoncpp/test/test_object_02.expected diff --git a/test/test_object_02.json b/trunk/jsoncpp/test/test_object_02.json similarity index 100% rename from test/test_object_02.json rename to trunk/jsoncpp/test/test_object_02.json diff --git a/test/test_object_03.expected b/trunk/jsoncpp/test/test_object_03.expected similarity index 100% rename from test/test_object_03.expected rename to trunk/jsoncpp/test/test_object_03.expected diff --git a/test/test_object_03.json b/trunk/jsoncpp/test/test_object_03.json similarity index 100% rename from test/test_object_03.json rename to trunk/jsoncpp/test/test_object_03.json diff --git a/test/test_object_04.expected b/trunk/jsoncpp/test/test_object_04.expected similarity index 100% rename from test/test_object_04.expected rename to trunk/jsoncpp/test/test_object_04.expected diff --git a/test/test_object_04.json b/trunk/jsoncpp/test/test_object_04.json similarity index 100% rename from test/test_object_04.json rename to trunk/jsoncpp/test/test_object_04.json diff --git a/test/test_preserve_comment_01.expected b/trunk/jsoncpp/test/test_preserve_comment_01.expected similarity index 100% rename from test/test_preserve_comment_01.expected rename to trunk/jsoncpp/test/test_preserve_comment_01.expected diff --git a/test/test_preserve_comment_01.json b/trunk/jsoncpp/test/test_preserve_comment_01.json similarity index 100% rename from test/test_preserve_comment_01.json rename to trunk/jsoncpp/test/test_preserve_comment_01.json diff --git a/test/test_real_01.expected b/trunk/jsoncpp/test/test_real_01.expected similarity index 100% rename from test/test_real_01.expected rename to trunk/jsoncpp/test/test_real_01.expected diff --git a/test/test_real_01.json b/trunk/jsoncpp/test/test_real_01.json similarity index 100% rename from test/test_real_01.json rename to trunk/jsoncpp/test/test_real_01.json diff --git a/test/test_real_02.expected b/trunk/jsoncpp/test/test_real_02.expected similarity index 100% rename from test/test_real_02.expected rename to trunk/jsoncpp/test/test_real_02.expected diff --git a/test/test_real_02.json b/trunk/jsoncpp/test/test_real_02.json similarity index 100% rename from test/test_real_02.json rename to trunk/jsoncpp/test/test_real_02.json diff --git a/test/test_real_03.expected b/trunk/jsoncpp/test/test_real_03.expected similarity index 100% rename from test/test_real_03.expected rename to trunk/jsoncpp/test/test_real_03.expected diff --git a/test/test_real_03.json b/trunk/jsoncpp/test/test_real_03.json similarity index 100% rename from test/test_real_03.json rename to trunk/jsoncpp/test/test_real_03.json diff --git a/test/test_real_04.expected b/trunk/jsoncpp/test/test_real_04.expected similarity index 100% rename from test/test_real_04.expected rename to trunk/jsoncpp/test/test_real_04.expected diff --git a/test/test_real_04.json b/trunk/jsoncpp/test/test_real_04.json similarity index 100% rename from test/test_real_04.json rename to trunk/jsoncpp/test/test_real_04.json diff --git a/test/test_real_05.expected b/trunk/jsoncpp/test/test_real_05.expected similarity index 100% rename from test/test_real_05.expected rename to trunk/jsoncpp/test/test_real_05.expected diff --git a/test/test_real_05.json b/trunk/jsoncpp/test/test_real_05.json similarity index 100% rename from test/test_real_05.json rename to 
trunk/jsoncpp/test/test_real_05.json diff --git a/test/test_real_06.expected b/trunk/jsoncpp/test/test_real_06.expected similarity index 100% rename from test/test_real_06.expected rename to trunk/jsoncpp/test/test_real_06.expected diff --git a/test/test_real_06.json b/trunk/jsoncpp/test/test_real_06.json similarity index 100% rename from test/test_real_06.json rename to trunk/jsoncpp/test/test_real_06.json diff --git a/test/test_real_07.expected b/trunk/jsoncpp/test/test_real_07.expected similarity index 100% rename from test/test_real_07.expected rename to trunk/jsoncpp/test/test_real_07.expected diff --git a/test/test_real_07.json b/trunk/jsoncpp/test/test_real_07.json similarity index 100% rename from test/test_real_07.json rename to trunk/jsoncpp/test/test_real_07.json From 43974b4a95104248486aa7cb7aff315214047df4 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 17 Mar 2007 22:14:59 +0000 Subject: [PATCH 029/268] - Made FastWriter output more compact. - fixed bug in runjsontests.py script. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@29 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/writer.h | 23 +++++++++++++-- trunk/jsoncpp/src/jsontestrunner/main.cpp | 3 +- trunk/jsoncpp/src/lib_json/json_writer.cpp | 34 +++++++++++++++++----- trunk/jsoncpp/test/runjsontests.py | 2 +- 4 files changed, 50 insertions(+), 12 deletions(-) diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 3abf2ea..94582d9 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -9,21 +9,37 @@ namespace Json { class Value; + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + /** \brief Outputs a Value in JSON format without formatting (not human friendly). * * The JSON document is written in a single line. It is not intended for 'human' consumption, * but may be usefull to support feature such as RPC where bandwith is limited. * \sa Reader, Value */ - class JSON_API FastWriter + class JSON_API FastWriter : public Writer { public: - std::string write( const Value &root ); + FastWriter(); + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); private: void writeValue( const Value &value ); std::string document_; + bool yamlCompatiblityEnabled_; }; /** \brief Writes a Value in JSON format in a human friendly way. @@ -49,11 +65,12 @@ namespace Json { public: StyledWriter(); + public: // overridden from Writer /** \brief Serialize a Value in JSON format. * \param root Value to serialize. * \return String containing the JSON document that represent the root value. 
*/ - std::string write( const Value &root ); + virtual std::string write( const Value &root ); private: void writeValue( const Value &value ); diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 1d43720..a576564 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -115,7 +115,8 @@ rewriteValueTree( const std::string &rewritePath, const Json::Value &root, std::string &rewrite ) { -// Json::FastWriter writer; + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); Json::StyledWriter writer; rewrite = writer.write( root ); FILE *fout = fopen( rewritePath.c_str(), "wt" ); diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index f1d8168..47577fa 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -67,10 +67,29 @@ std::string valueToQuotedString( const char *value ) return std::string("\"") + value + "\""; } +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + // Class FastWriter // ////////////////////////////////////////////////////////////////// +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + std::string FastWriter::write( const Value &root ) { @@ -106,33 +125,34 @@ FastWriter::writeValue( const Value &value ) break; case arrayValue: { - document_ += "[ "; + document_ += "["; int size = value.size(); for ( int index =0; index < size; ++index ) { if ( index > 0 ) - document_ += ", "; + document_ += ","; writeValue( value[index] ); } - document_ += " ]"; + document_ += "]"; } break; case objectValue: { Value::Members members( value.getMemberNames() ); - document_ += "{ "; + document_ += "{"; for ( Value::Members::iterator it = members.begin(); it != members.end(); ++it ) { const std::string &name = *it; if ( it != members.begin() ) - document_ += ", "; + document_ += ","; document_ += valueToQuotedString( name.c_str() ); - document_ += " : "; + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; writeValue( value[name] ); } - document_ += " }"; + document_ += "}"; } break; } diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index a5284fb..de7bd9d 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -83,7 +83,7 @@ def runAllTests( jsontest_executable_path, input_dir = None ): sys.exit( 1 ) jsontest_executable_path = os.path.normpath( os.path.abspath( sys.argv[1] ) ) - if len(sys.argv) > 1: + if len(sys.argv) > 2: input_path = os.path.normpath( os.path.abspath( sys.argv[2] ) ) else: input_path = None From 2567f9d66741d0b4833a645769356a7096c8701d Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 03:31:35 +0000 Subject: [PATCH 030/268] Added newline b/c of gcc--warning: no newline at end of file git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@30 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/jsontestrunner/main.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index a576564..98cd8fd 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -183,4 +183,5 @@ int main( int argc, const char *argv[] ) } return exitCode; -} \ No newline at end of file +} + From 78c9131a2301c85d106c0f1956f7aa31e30050d6 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 04:55:07 +0000 Subject: [PATCH 031/268] Fixed gcc 2.95.3 problem. Bug: 1570919 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@31 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 5a59956..b3e90b6 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -605,7 +605,12 @@ Value::operator >( const Value &other ) const bool Value::operator ==( const Value &other ) const { - if ( type_ != other.type_ ) + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) return false; switch ( type_ ) { From dc5c98b34c9b480c9f31ea182ec7bdbe63b32f70 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 05:04:38 +0000 Subject: [PATCH 032/268] [1587188]Fixed parsing of comment at tail of object. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@32 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 9d0e665..2a4180d 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -395,12 +395,17 @@ Reader::readObject( Token &tokenStart ) Token comma; if ( !readToken( comma ) || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator ) ) + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) { return addErrorAndRecover( "Missing ',' or '}' in object declaration", comma, tokenObjectEnd ); } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); if ( comma.type_ == tokenObjectEnd ) return true; } From a87e9a79e107ca80d98bcb360006b77467dfa716 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 05:56:39 +0000 Subject: [PATCH 033/268] [1611376]writer now escapes special characters. When no special chars are present, old behavior is retained. New method might have a performance penalty b/c of operator new inside std::string. (This would not exist if the whole thing operated on ostream instead, I think.) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@33 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 1 + trunk/jsoncpp/src/lib_json/json_writer.cpp | 40 +++++++++++++++++++++- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 2a4180d..fa3c5eb 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -560,6 +560,7 @@ Reader::decodeString( Token &token, std::string &decoded ) if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) return false; // @todo encode unicode as utf8. + // @todo remember to alter the writer too. } break; default: diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 47577fa..6fa5993 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -64,7 +64,45 @@ std::string valueToString( bool value ) std::string valueToQuotedString( const char *value ) { - return std::string("\"") + value + "\""; + // Not sure how to handle unicode... + if (std::strpbrk(value, "\"\\\b\f\n\r\t") == NULL) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string result("\""); + for (const char* c=value; *c != 0; ++c){ + switch(*c){ + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something.) 
+ default: + result += *c; + } + } + return result + "\""; } // Class Writer From ab4d6bf47dde86aa3255c57d7a32ccda900d9a7d Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 06:12:28 +0000 Subject: [PATCH 034/268] [1611376]by reserving the max string-size when escaped chars exist, we should save some runtime. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@34 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 6fa5993..063a66f 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -70,7 +70,10 @@ std::string valueToQuotedString( const char *value ) // We have to walk value and escape any special characters. // Appending to std::string is not efficient, but this should be rare. // (Note: forward slashes are *not* rare, but I am not escaping them.) - std::string result("\""); + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; for (const char* c=value; *c != 0; ++c){ switch(*c){ case '\"': @@ -102,7 +105,8 @@ std::string valueToQuotedString( const char *value ) result += *c; } } - return result + "\""; + result += "\""; + return result; } // Class Writer From d8ad53f0b61ec92f87c7b8162793ba61e18cc634 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 06:38:29 +0000 Subject: [PATCH 035/268] Added comments, pre- and post-condition for resize(), clear(), Value(ValueType). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@35 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 5b5e460..fe2f9eb 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -166,6 +166,13 @@ namespace Json { #endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION public: + /** \brief Create a default Value of the given type. + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + */ Value( ValueType type = nullValue ); Value( Int value ); Value( UInt value ); @@ -230,11 +237,15 @@ namespace Json { UInt size() const; /// Removes all object members and array elements. + /// @pre type() is arrayValue, objectValue, or nullValue + /// @post type() is unchanged void clear(); /// Resize the array to size elements. /// New elements are initialized to null. /// May only be called on nullValue or arrayValue. + /// @pre type() is arrayValue or nullValue + /// @post type() is arrayValue void resize( UInt size ); /// Access an array element (zero based index ). From 250e393b1609f25553e2be56261053cdcc1dec2a Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 07:05:19 +0000 Subject: [PATCH 036/268] setComment() will assert if comment does not start with / (or if it were NULL, which would have seg-faulted before). 
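As an aside (not from the patches themselves): a short sketch of the escaping behaviour added to valueToQuotedString in PATCH 033/034, assuming the function is called directly through its JSON_API declaration in writer.h. The strings in the comments show the approximate output.

    #include <iostream>
    #include "json/writer.h"

    int main()
    {
        // Fast path: no special characters, the value is simply wrapped in quotes.
        std::cout << Json::valueToQuotedString( "plain text" ) << "\n";   // "plain text"

        // Quotes, backslashes and control characters are escaped one by one;
        // forward slashes are deliberately left untouched.
        std::cout << Json::valueToQuotedString( "say \"hi\"\tin c:/tmp\n" ) << "\n";
        // roughly: "say \"hi\"\tin c:/tmp\n"  (the escapes appear literally in the output)
        return 0;
    }
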
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@36 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 2 ++ trunk/jsoncpp/src/lib_json/json_value.cpp | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index fe2f9eb..19e2966 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -317,8 +317,10 @@ namespace Json { // EnumValues enumValues() const; //# endif + /// Comments must be //... or /* ... */ void setComment( const char *comment, CommentPlacement placement ); + /// Comments must be //... or /* ... */ void setComment( const std::string &comment, CommentPlacement placement ); bool hasComment( CommentPlacement placement ) const; diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index b3e90b6..0fb735f 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1,3 +1,4 @@ +#include #include #include #include @@ -149,6 +150,9 @@ Value::CommentInfo::setComment( const char *text ) { if ( comment_ ) valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]==NULL || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. comment_ = valueAllocator()->duplicateStringValue( text ); } From 6e6c67f1d33ff28ec9cfeeb4848906bca4adee14 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 07:16:59 +0000 Subject: [PATCH 037/268] fixed compiler warning git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@37 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 0fb735f..e93e346 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -151,7 +151,7 @@ Value::CommentInfo::setComment( const char *text ) if ( comment_ ) valueAllocator()->releaseStringValue( comment_ ); JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]==NULL || text[0]=='/', "Comments must start with /"); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); // It seems that /**/ style comments are acceptable as well. comment_ = valueAllocator()->duplicateStringValue( text ); } From f4a7183875f83b1a34482c7f6efd678094c9b18b Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 07:28:19 +0000 Subject: [PATCH 038/268] Added/clarified comments. Move extra verbiage out of brief sections. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@38 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 19e2966..376b495 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -167,11 +167,19 @@ namespace Json { public: /** \brief Create a default Value of the given type. + This is a very useful constructor. To create an empty array, pass arrayValue. To create an empty object, pass objectValue. Another Value can then be set to this one by assignment. This is useful since clear() and resize() will not alter types. 
+ + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode */ Value( ValueType type = nullValue ); Value( Int value ); @@ -179,6 +187,7 @@ namespace Json { Value( double value ); Value( const char *value ); /** \brief Constructs a value from a static string. + * Like other value string constructor but do not duplicate the string for * internal storage. The given string must remain alive after the call to this * constructor. @@ -197,6 +206,9 @@ namespace Json { ~Value(); Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. void swap( Value &other ); ValueType type() const; @@ -237,15 +249,15 @@ namespace Json { UInt size() const; /// Removes all object members and array elements. - /// @pre type() is arrayValue, objectValue, or nullValue - /// @post type() is unchanged + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged void clear(); /// Resize the array to size elements. /// New elements are initialized to null. /// May only be called on nullValue or arrayValue. - /// @pre type() is arrayValue or nullValue - /// @post type() is arrayValue + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue void resize( UInt size ); /// Access an array element (zero based index ). @@ -273,6 +285,7 @@ namespace Json { /// Access an object value by name, returns null if there is no member with that name. const Value &operator[]( const std::string &key ) const; /** \brief Access an object value by name, create a null member if it does not exist. + * If the object as no entry for that name, then the member name used to store * the new entry is not duplicated. * Example of use: From c032bfe26c0160ce1ec2b8235cdf8cd409a6b153 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 08:30:20 +0000 Subject: [PATCH 039/268] Added removeMember() and altered a few comments. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@39 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 40 ++++++++++++++++------- trunk/jsoncpp/src/lib_json/json_value.cpp | 33 +++++++++++++++++++ 2 files changed, 61 insertions(+), 12 deletions(-) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 376b495..2453cb1 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -270,9 +270,10 @@ namespace Json { /// otherwise returns defaultValue. Value get( UInt index, const Value &defaultValue ) const; - /// Returns true if index < size(). + /// Return true if index < size(). bool isValidIndex( UInt index ) const; - /// Append value to array at the end. + /// \brief Append value to array at the end. + /// /// Equivalent to jsonvalue[jsonvalue.size()] = value; Value &append( const Value &value ); @@ -302,27 +303,41 @@ namespace Json { /// Access an object value by name, returns null if there is no member with that name. const Value &operator[]( const CppTL::ConstString &key ) const; # endif - /// Returns the member named key if it exist, defaultValue otherwise. + /// Return the member named key if it exist, defaultValue otherwise. Value get( const char *key, const Value &defaultValue ) const; - /// Returns the member named key if it exist, defaultValue otherwise. + /// Return the member named key if it exist, defaultValue otherwise. 
Value get( const std::string &key, const Value &defaultValue ) const; # ifdef JSON_USE_CPPTL - /// Returns the member named key if it exist, defaultValue otherwise. + /// Return the member named key if it exist, defaultValue otherwise. Value get( const CppTL::ConstString &key, const Value &defaultValue ) const; # endif - /// Returns true if the object has a member named key. + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. bool isMember( const char *key ) const; - /// Returns true if the object has a member named key. + /// Return true if the object has a member named key. bool isMember( const std::string &key ) const; # ifdef JSON_USE_CPPTL - /// Returns true if the object has a member named key. + /// Return true if the object has a member named key. bool isMember( const CppTL::ConstString &key ) const; # endif - // Returns a list of the member names. + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue Members getMemberNames() const; //# ifdef JSON_USE_CPPTL @@ -337,6 +352,7 @@ namespace Json { void setComment( const std::string &comment, CommentPlacement placement ); bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. std::string getComment( CommentPlacement placement ) const; std::string toStyledString() const; @@ -864,13 +880,13 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator return computeDistance( other ); } - /// Returns either the index or the member name of the referenced value as a Value. + /// Return either the index or the member name of the referenced value as a Value. Value key() const; - /// Returns the index of the referenced Value. -1 if it is not an arrayValue. + /// Return the index of the referenced Value. -1 if it is not an arrayValue. Value::UInt index() const; - /// Returns the member name of the referenced Value. "" if it is not an objectValue. + /// Return the member name of the referenced Value. "" if it is not an objectValue. 
const char *memberName() const; protected: diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index e93e346..5fbbe13 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1103,6 +1103,37 @@ Value::get( const std::string &key, return get( key.c_str(), defaultValue ); } +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} # ifdef JSON_USE_CPPTL Value @@ -1140,6 +1171,8 @@ Value::Members Value::getMemberNames() const { JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); Members members; members.reserve( value_.map_->size() ); #ifndef JSON_VALUE_USE_INTERNAL_MAP From e14cb743a03fc87e8c66ae5ee6556bd02a214b62 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 08:55:25 +0000 Subject: [PATCH 040/268] Added empty() operator-bang and isNull() git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@40 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 10 +++++++++- trunk/jsoncpp/src/lib_json/json_value.cpp | 24 +++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 2453cb1..de09d5f 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -233,6 +233,7 @@ namespace Json { double asDouble() const; bool asBool() const; + bool isNull() const; bool isBool() const; bool isInt() const; bool isUInt() const; @@ -248,7 +249,14 @@ namespace Json { /// Number of values in array or object UInt size() const; - /// Removes all object members and array elements. + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. /// \pre type() is arrayValue, objectValue, or nullValue /// \post type() is unchanged void clear(); diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 5fbbe13..329b634 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -888,6 +888,23 @@ Value::size() const } +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + void Value::clear() { @@ -1217,6 +1234,13 @@ Value::getMemberNames() const //# endif +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + bool Value::isBool() const { From fc7a72640fe71d57d0f49d258b6f2c8c36b196dc Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 08:57:57 +0000 Subject: [PATCH 041/268] Renamed buildLibary to buildLibrary and added SharedLibrary rule. 
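Again purely for illustration (not part of the patches): a small sketch of the removeMember(), empty() and isNull()/operator! behaviour described above, assuming the value.h declarations from PATCH 039 and PATCH 040.

    #include <cassert>
    #include "json/value.h"

    int main()
    {
        Json::Value obj( Json::objectValue );
        obj["kept"] = 1;
        obj["doomed"] = 2;

        Json::Value removed = obj.removeMember( "doomed" );  // returns the removed value
        assert( removed.asInt() == 2 );
        assert( !obj.isMember( "doomed" ) );

        // Removing a key that does not exist is a no-op and yields null.
        Json::Value gone = obj.removeMember( "missing" );
        assert( gone.isNull() );
        assert( !gone );              // operator! is shorthand for isNull()

        Json::Value arr( Json::arrayValue );
        assert( arr.empty() );        // empty array -> true
        assert( !arr.isNull() );      // but an empty array is not null
        return 0;
    }
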
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@41 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 7 +++++-- trunk/jsoncpp/src/lib_json/sconscript | 4 ++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index d73b629..e820659 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -125,14 +125,17 @@ def buildJSONTests( env, target_sources, target_name ): check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) env.AlwaysBuild( check_alias_target ) -def buildLibary( env, target_sources, target_name ): +def buildLibrary( env, target_sources, target_name ): static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', source=target_sources ) + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) global lib_dir env.Install( lib_dir, static_lib ) + env.Install( lib_dir, shared_lib ) env['SRCDIST_ADD']( source=[target_sources] ) -Export( 'env env_testing buildJSONExample buildLibary buildJSONTests' ) +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests' ) def buildProjectInDirectory( target_directory ): global build_dir diff --git a/trunk/jsoncpp/src/lib_json/sconscript b/trunk/jsoncpp/src/lib_json/sconscript index cc044fb..f6520d1 100644 --- a/trunk/jsoncpp/src/lib_json/sconscript +++ b/trunk/jsoncpp/src/lib_json/sconscript @@ -1,6 +1,6 @@ -Import( 'env buildLibary' ) +Import( 'env buildLibrary' ) -buildLibary( env, Split( """ +buildLibrary( env, Split( """ json_reader.cpp json_value.cpp json_writer.cpp From 8d93e15758a18567507811f5d56722833c53eeb2 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 09:57:01 +0000 Subject: [PATCH 042/268] Added istream/ostream funcs/operators git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@42 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/jsoncpp.dox | 7 +++++ trunk/jsoncpp/include/json/reader.h | 32 ++++++++++++++++++++++ trunk/jsoncpp/include/json/writer.h | 7 ++++- trunk/jsoncpp/src/lib_json/json_reader.cpp | 29 ++++++++++++++++++++ trunk/jsoncpp/src/lib_json/json_writer.cpp | 8 ++++++ 5 files changed, 82 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 5463463..3667fa6 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -65,6 +65,13 @@ root["indent"]["use_space"] = getCurrentIndentUseSpace(); Json::StyledWriter writer; // Make a new JSON document for the configuration. Preserve original comments. std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; \endcode \section _plinks Build instructions diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 60594d9..cb76fab 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -6,6 +6,7 @@ # include # include # include +# include namespace Json { @@ -47,6 +48,12 @@ namespace Json { Value &root, bool collectComments = true ); + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). 
+ bool parse( std::istream&, + Value &root, + bool collectComments = true ); + /** \brief Returns a user friendly string that list errors in the parsed document. * \return Formatted error message with the list of errors with their location in * the parsed document. An empty string is returned if no error occurred @@ -144,6 +151,31 @@ namespace Json { bool collectComments_; }; + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); } // namespace Json diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 94582d9..1312dbc 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -4,6 +4,7 @@ # include "value.h" # include # include +# include namespace Json { @@ -68,7 +69,7 @@ namespace Json { public: // overridden from Writer /** \brief Serialize a Value in JSON format. * \param root Value to serialize. - * \return String containing the JSON document that represent the root value. + * \return String containing the JSON document that represents the root value. */ virtual std::string write( const Value &root ); @@ -102,6 +103,10 @@ namespace Json { std::string JSON_API valueToString( bool value ); std::string JSON_API valueToQuotedString( const char *value ); + /// \brief Output using the StyledWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + } // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index fa3c5eb..91b07c6 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -3,6 +3,8 @@ #include #include #include +#include +#include #if _MSC_VER >= 1400 // VC++ 8.0 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. @@ -52,6 +54,23 @@ Reader::parse( const std::string &document, return parse( begin, end, root, collectComments ); } +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + bool Reader::parse( const char *beginDoc, const char *endDoc, Value &root, @@ -718,4 +737,14 @@ Reader::getFormatedErrorMessages() const } +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + } // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 063a66f..5f000df 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -2,6 +2,7 @@ #include #include #include +#include #if _MSC_VER >= 1400 // VC++ 8.0 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. @@ -475,5 +476,12 @@ StyledWriter::normalizeEOL( const std::string &text ) return normalized; } +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledWriter writer; + sout << writer.write(root); + return sout; +} + } // namespace Json From 840420ac25a30ec5af00ca79cc165d29ab0c38e0 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 23 Mar 2007 10:19:49 +0000 Subject: [PATCH 043/268] Fixed 2.95.3 header probs git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@43 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/reader.h | 2 +- trunk/jsoncpp/include/json/writer.h | 2 +- trunk/jsoncpp/src/lib_json/json_reader.cpp | 2 +- trunk/jsoncpp/src/lib_json/json_writer.cpp | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index cb76fab..2d39608 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -6,7 +6,7 @@ # include # include # include -# include +# include namespace Json { diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 1312dbc..ea9716d 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -4,7 +4,7 @@ # include "value.h" # include # include -# include +# include namespace Json { diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 91b07c6..529703b 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -3,7 +3,7 @@ #include #include #include -#include +#include #include #if _MSC_VER >= 1400 // VC++ 8.0 diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 5f000df..8af01f3 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -2,7 +2,7 @@ #include #include #include -#include +#include #if _MSC_VER >= 1400 // VC++ 8.0 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
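To round off the stream support added in PATCH 042, a usage sketch (not part of the patches) of operator>> and operator<<. The file name config.json is only a placeholder; the error handling relies on the std::runtime_error thrown from operator>> as shown above.

    #include <fstream>
    #include <iostream>
    #include <stdexcept>
    #include "json/reader.h"
    #include "json/writer.h"

    int main()
    {
        Json::Value root;
        std::ifstream in( "config.json" );    // placeholder file name
        try
        {
            // operator>> parses the whole stream into the given sub-value and
            // keeps any comments found in the input.
            in >> root["config"];
        }
        catch ( const std::exception &e )
        {
            // On a parse error, operator>> throws std::runtime_error built from
            // Reader::getFormatedErrorMessages().
            std::cerr << e.what() << std::endl;
            return 1;
        }

        // operator<< serializes with StyledWriter (indented, human readable).
        std::cout << root;
        return 0;
    }
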
From ae460819145279e67cb391eb1b90aff7bedf258b Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 19 Apr 2007 22:20:46 +0000 Subject: [PATCH 044/268] Added virtual destructor to StyledWriter to avoid -Wall warning from g++ git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@44 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/writer.h | 1 + 1 file changed, 1 insertion(+) diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index ea9716d..5ec8641 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -65,6 +65,7 @@ namespace Json { { public: StyledWriter(); + virtual ~StyledWriter(){} public: // overridden from Writer /** \brief Serialize a Value in JSON format. From 0b4a08b871d13791728305203ef3b2dc085d4d8b Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 9 May 2007 19:35:15 +0000 Subject: [PATCH 045/268] Fixed compilation warnings. Added -Wall to linux-gcc compilation. JSON_ASSERT_MESSAGE now throws exception (but JSON_ASSERT does not). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@45 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 2 +- trunk/jsoncpp/include/json/value.h | 2 +- trunk/jsoncpp/src/jsontestrunner/main.cpp | 4 +-- trunk/jsoncpp/src/lib_json/json_value.cpp | 35 +++++++++++----------- trunk/jsoncpp/src/lib_json/json_writer.cpp | 6 ++-- 5 files changed, 25 insertions(+), 24 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index e820659..b4cba59 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -76,7 +76,7 @@ elif platform == 'mingw': env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) elif platform == 'linux-gcc': env.Tool( 'default' ) - env.Append( LIBS = ['pthread'] ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) else: print "UNSUPPORTED PLATFORM." 
env.Exit(1) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index de09d5f..480f433 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -518,7 +518,7 @@ namespace Json { class ValueAllocator { public: - enum { unknown = -1 }; + enum { unknown = (unsigned)-1 }; virtual ~ValueAllocator(); diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 98cd8fd..1d8b303 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -13,12 +13,12 @@ readInputTestFile( const char *path ) if ( !file ) return std::string(""); fseek( file, 0, SEEK_END ); - int size = ftell( file ); + long size = ftell( file ); fseek( file, 0, SEEK_SET ); std::string text; char *buffer = new char[size+1]; buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == size ) + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) text = buffer; fclose( file ); delete[] buffer; diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 329b634..bc4ba3d 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -2,6 +2,7 @@ #include #include #include +#include #include "assert.h" #ifdef JSON_USE_CPPTL # include @@ -13,7 +14,7 @@ #define JSON_ASSERT_UNREACHABLE assert( false ) #define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) assert( condition && message ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); namespace Json { @@ -265,8 +266,8 @@ Value::CZString::isStaticString() const */ Value::Value( ValueType type ) : type_( type ) - , comments_( 0 ) , allocated_( 0 ) + , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -680,7 +681,7 @@ Value::asString() const case realValue: case arrayValue: case objectValue: - JSON_ASSERT( "Type is not convertible to double" && false ); + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); default: JSON_ASSERT_UNREACHABLE; } @@ -705,17 +706,17 @@ Value::asInt() const case intValue: return value_.int_; case uintValue: - JSON_ASSERT( value_.uint_ < maxInt && "integer out of signed integer range" ); + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); return value_.uint_; case realValue: - JSON_ASSERT( value_.real_ >= minInt && value_.real_ <= maxInt && "Real out of signed integer range" ); + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); return Int( value_.real_ ); case booleanValue: return value_.bool_ ? 
1 : 0; case stringValue: case arrayValue: case objectValue: - JSON_ASSERT( "Type is not convertible to double" && false ); + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); default: JSON_ASSERT_UNREACHABLE; } @@ -730,19 +731,19 @@ Value::asUInt() const case nullValue: return 0; case intValue: - JSON_ASSERT( value_.int_ >= 0 && "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); return value_.int_; case uintValue: return value_.uint_; case realValue: - JSON_ASSERT( value_.real_ >= 0 && value_.real_ <= maxUInt && "Real out of unsigned integer range" ); + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); return UInt( value_.real_ ); case booleanValue: return value_.bool_ ? 1 : 0; case stringValue: case arrayValue: case objectValue: - JSON_ASSERT( "Type is not convertible to double" && false ); + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); default: JSON_ASSERT_UNREACHABLE; } @@ -767,7 +768,7 @@ Value::asDouble() const case stringValue: case arrayValue: case objectValue: - JSON_ASSERT( "Type is not convertible to double" && false ); + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); default: JSON_ASSERT_UNREACHABLE; } @@ -816,7 +817,7 @@ Value::isConvertibleTo( ValueType other ) const || other == booleanValue; case uintValue: return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= maxInt ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) || other == uintValue || other == realValue || other == stringValue @@ -1499,22 +1500,22 @@ PathArgument::PathArgument() PathArgument::PathArgument( Value::UInt index ) - : kind_( kindIndex ) - , index_( index ) + : index_( index ) + , kind_( kindIndex ) { } PathArgument::PathArgument( const char *key ) - : kind_( kindKey ) - , key_( key ) + : key_( key ) + , kind_( kindKey ) { } PathArgument::PathArgument( const std::string &key ) - : kind_( kindKey ) - , key_( key.c_str() ) + : key_( key.c_str() ) + , kind_( kindKey ) { } diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 8af01f3..7485e5a 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -290,7 +290,7 @@ StyledWriter::writeValue( const Value &value ) void StyledWriter::writeArrayValue( const Value &value ) { - int size = value.size(); + unsigned size = value.size(); if ( size == 0 ) pushValue( "[]" ); else @@ -301,7 +301,7 @@ StyledWriter::writeArrayValue( const Value &value ) writeWithIndent( "[" ); indent(); bool hasChildValue = !childValues_.empty(); - int index =0; + unsigned index =0; while ( true ) { const Value &childValue = value[index]; @@ -328,7 +328,7 @@ StyledWriter::writeArrayValue( const Value &value ) { assert( childValues_.size() == size ); document_ += "[ "; - for ( int index =0; index < size; ++index ) + for ( unsigned index =0; index < size; ++index ) { if ( index > 0 ) document_ += ", "; From 64ecf13ffbcbd6a040dd1525991f428db9ae171b Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 13 Jun 2007 15:48:30 +0000 Subject: [PATCH 046/268] Added prop svn:eol-style native. Stripped carriage-returns on unix. Hopefully, this will work for Windows too now. 
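The r45 patch above makes JSON_ASSERT_MESSAGE throw instead of assert, so the range checks added to asInt()/asUInt() now surface as exceptions. A minimal sketch of that behavior, assuming the patched json_value.cpp; the sample value and the try/catch wrapper are illustrative only and not part of the patch series:

    #include <json/json.h>
    #include <iostream>
    #include <stdexcept>

    int main()
    {
       Json::Value big( 3000000000u );   // unsigned value above the signed-int maximum
       try
       {
          int n = big.asInt();           // fails the range check in asInt()
          std::cout << n << std::endl;
       }
       catch ( const std::runtime_error &e )
       {
          std::cout << "conversion refused: " << e.what() << std::endl;
       }
       return 0;
    }

Note that plain JSON_ASSERT (without a message) still maps to assert(), so internal consistency checks keep their abort-in-debug behavior.
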
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@46 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- .../src/lib_json/json_batchallocator.h | 250 +- .../src/lib_json/json_internalarray.inl | 896 ++--- .../jsoncpp/src/lib_json/json_internalmap.inl | 1214 +++--- trunk/jsoncpp/src/lib_json/json_reader.cpp | 1500 ++++---- trunk/jsoncpp/src/lib_json/json_value.cpp | 3404 ++++++++--------- .../src/lib_json/json_valueiterator.inl | 514 +-- trunk/jsoncpp/src/lib_json/json_writer.cpp | 974 ++--- trunk/jsoncpp/src/lib_json/lib_json.vcproj | 422 +- trunk/jsoncpp/src/lib_json/sconscript | 16 +- 9 files changed, 4595 insertions(+), 4595 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_batchallocator.h b/trunk/jsoncpp/src/lib_json/json_batchallocator.h index 1e35c19..87ea5ed 100644 --- a/trunk/jsoncpp/src/lib_json/json_batchallocator.h +++ b/trunk/jsoncpp/src/lib_json/json_batchallocator.h @@ -1,125 +1,125 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. - { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. 
- void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. + { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. 
+ void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/trunk/jsoncpp/src/lib_json/json_internalarray.inl b/trunk/jsoncpp/src/lib_json/json_internalarray.inl index 2b3d859..9b985d2 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalarray.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalarray.inl @@ -1,448 +1,448 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - 
if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if 
( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/trunk/jsoncpp/src/lib_json/json_internalmap.inl b/trunk/jsoncpp/src/lib_json/json_internalmap.inl index 8e60b46..1977148 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalmap.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalmap.inl @@ -1,607 +1,607 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124)
+value have extra state: valid, available, deleted
+*/
+
+
+ValueInternalMap::ValueInternalMap()
+ : buckets_( 0 )
+ , tailLink_( 0 )
+ , bucketsSize_( 0 )
+ , itemCount_( 0 )
+{
+}
+
+
+ValueInternalMap::ValueInternalMap( const ValueInternalMap &other )
+ : buckets_( 0 )
+ , tailLink_( 0 )
+ , bucketsSize_( 0 )
+ , itemCount_( 0 )
+{
+ reserve( other.itemCount_ );
+ IteratorState it;
+ IteratorState itEnd;
+ other.makeBeginIterator( it );
+ other.makeEndIterator( itEnd );
+ for ( ; !equals(it,itEnd); increment(it) )
+ {
+ bool isStatic;
+ const char *memberName = key( it, isStatic );
+ const Value &aValue = value( it );
+ resolveReference(memberName, isStatic) = aValue;
+ }
+}
+
+
+ValueInternalMap &
+ValueInternalMap::operator =( const ValueInternalMap &other )
+{
+ ValueInternalMap dummy( other );
+ swap( dummy );
+ return *this;
+}
+
+
+ValueInternalMap::~ValueInternalMap()
+{
+ if ( buckets_ )
+ {
+ for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex )
+ {
+ ValueInternalLink *link = buckets_[bucketIndex].next_;
+ while ( link )
+ {
+ ValueInternalLink *linkToRelease = link;
+ link = link->next_;
+ mapAllocator()->releaseMapLink( linkToRelease );
+ }
+ }
+ mapAllocator()->releaseMapBuckets( buckets_ );
+ }
+}
+
+
+void
+ValueInternalMap::swap( ValueInternalMap &other )
+{
+ ValueInternalLink *tempBuckets = buckets_;
+ buckets_ = other.buckets_;
+ other.buckets_ = tempBuckets;
+ ValueInternalLink *tempTailLink = tailLink_;
+ tailLink_ = other.tailLink_;
+ other.tailLink_ = tempTailLink;
+ BucketIndex tempBucketsSize = bucketsSize_;
+ bucketsSize_ = other.bucketsSize_;
+ other.bucketsSize_ = tempBucketsSize;
+ BucketIndex tempItemCount = itemCount_;
+ itemCount_ = other.itemCount_;
+ other.itemCount_ = tempItemCount;
+}
+
+
+void
+ValueInternalMap::clear()
+{
+ ValueInternalMap dummy;
+ swap( dummy );
+}
+
+
+ValueInternalMap::BucketIndex
+ValueInternalMap::size() const
+{
+ return itemCount_;
+}
+
+bool
+ValueInternalMap::reserveDelta( BucketIndex growth )
+{
+ return reserve( itemCount_ + growth );
+}
+
+bool
+ValueInternalMap::reserve( BucketIndex newItemCount )
+{
+ if ( !buckets_ && newItemCount > 0 )
+ {
+ buckets_ = mapAllocator()->allocateMapBuckets( 1 );
+ bucketsSize_ = 1;
+ tailLink_ = &buckets_[0];
+ }
+// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink;
+ return true;
+}
+
+
+const Value *
+ValueInternalMap::find( const char *key ) const
+{
+ if ( !bucketsSize_ )
+ return 0;
+ HashKey hashedKey = hash( key );
+ BucketIndex bucketIndex = hashedKey % bucketsSize_;
+ for ( const ValueInternalLink *current = &buckets_[bucketIndex];
+ current != 0;
+ current = current->next_ )
+ {
+ for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index )
+ {
+ if ( current->items_[index].isItemAvailable() )
+ return 0;
+ if ( strcmp( key, current->keys_[index] ) == 0 )
+ return &current->items_[index];
+ }
+ }
+ return 0;
+}
+
+
+Value *
+ValueInternalMap::find( const char *key )
+{
+ const ValueInternalMap *constThis = this;
+ return const_cast<Value *>( constThis->find( key ) );
+}
+
+
+Value &
+ValueInternalMap::resolveReference( const char *key,
+ bool isStatic )
+{
+ HashKey hashedKey = hash( key );
+ if ( bucketsSize_ )
+ {
+ BucketIndex bucketIndex = hashedKey % bucketsSize_;
+ ValueInternalLink **previous = 0;
+ BucketIndex index;
+ for ( ValueInternalLink *current = &buckets_[bucketIndex];
+ current != 0;
+ previous = &current->next_, current = current->next_ )
+ {
+ 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
);
+ BucketIndex bucketIndex = hashedKey % bucketsSize_;
+ ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex );
+ ValueInternalLink *link = previousLink;
+ BucketIndex index;
+ for ( index =0; index < ValueInternalLink::itemPerLink; ++index )
+ {
+ if ( link->items_[index].isItemAvailable() )
+ break;
+ }
+ if ( index == ValueInternalLink::itemPerLink ) // need to add a new page
+ {
+ ValueInternalLink *newLink = mapAllocator()->allocateMapLink();
+ index = 0;
+ link->next_ = newLink;
+ previousLink = newLink;
+ link = newLink;
+ }
+ return setNewItem( key, isStatic, link, index );
+}
+
+
+ValueInternalMap::HashKey
+ValueInternalMap::hash( const char *key ) const
+{
+ HashKey hash = 0;
+ while ( *key )
+ hash += *key++ * 37;
+ return hash;
+}
+
+
+int
+ValueInternalMap::compare( const ValueInternalMap &other ) const
+{
+ int sizeDiff( itemCount_ - other.itemCount_ );
+ if ( sizeDiff != 0 )
+ return sizeDiff;
+ // Strict order guaranty is required. Compare all keys FIRST, then compare values.
+ IteratorState it;
+ IteratorState itEnd;
+ makeBeginIterator( it );
+ makeEndIterator( itEnd );
+ for ( ; !equals(it,itEnd); increment(it) )
+ {
+ if ( !other.find( key( it ) ) )
+ return 1;
+ }
+
+ // All keys are equals, let's compare values
+ makeBeginIterator( it );
+ for ( ; !equals(it,itEnd); increment(it) )
+ {
+ const Value *otherValue = other.find( key( it ) );
+ int valueDiff = value(it).compare( *otherValue );
+ if ( valueDiff != 0 )
+ return valueDiff;
+ }
+ return 0;
+}
+
+
+void
+ValueInternalMap::makeBeginIterator( IteratorState &it ) const
+{
+ it.map_ = const_cast<ValueInternalMap *>( this );
+ it.bucketIndex_ = 0;
+ it.itemIndex_ = 0;
+ it.link_ = buckets_;
+}
+
+
+void
+ValueInternalMap::makeEndIterator( IteratorState &it ) const
+{
+ it.map_ = const_cast<ValueInternalMap *>( this );
+ it.bucketIndex_ = bucketsSize_;
+ it.itemIndex_ = 0;
+ it.link_ = 0;
+}
+
+
+bool
+ValueInternalMap::equals( const IteratorState &x, const IteratorState &other )
+{
+ return x.map_ == other.map_
+ && x.bucketIndex_ == other.bucketIndex_
+ && x.link_ == other.link_
+ && x.itemIndex_ == other.itemIndex_;
+}
+
+
+void
+ValueInternalMap::incrementBucket( IteratorState &iterator )
+{
+ ++iterator.bucketIndex_;
+ JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_,
+ "ValueInternalMap::increment(): attempting to iterate beyond end." );
+ if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ )
+ iterator.link_ = 0;
+ else
+ iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]);
+ iterator.itemIndex_ = 0;
+}
+
+
+void
+ValueInternalMap::increment( IteratorState &iterator )
+{
+ JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." );
+ ++iterator.itemIndex_;
+ if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink )
+ {
+ JSON_ASSERT_MESSAGE( iterator.link_ != 0,
+ "ValueInternalMap::increment(): attempting to iterate beyond end." );
+ iterator.link_ = iterator.link_->next_;
+ if ( iterator.link_ == 0 )
+ incrementBucket( iterator );
+ }
+ else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() )
+ {
+ incrementBucket( iterator );
+ }
+}
+
+
+void
+ValueInternalMap::decrement( IteratorState &iterator )
+{
+ if ( iterator.itemIndex_ == 0 )
+ {
+ JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." );
+ if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] )
+ {
+ JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 529703b..0ded777 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -1,750 +1,750 @@ -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - -Reader::Reader() -{ -} - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. 
- std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return 
false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - 
int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - if ( !readToken( token ) - || ( token.type_ != tokenArraySeparator && - token.type_ != tokenArrayEnd ) ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - // @todo encode unicode as utf8. - // @todo remember to alter the writer too. 
- } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() +{ +} + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; 
+ break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + 
return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + if ( !readToken( token ) + || ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ) ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + // @todo encode unicode as utf8. + // @todo remember to alter the writer too. 
+ } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index bc4ba3d..a917772 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ 
b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1,1702 +1,1702 @@ -#include -#include -#include -#include -#include -#include "assert.h" -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); -const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); -const Value::UInt Value::maxUInt = Value::UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - -Value::Value( const std::string &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( 
stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? 
true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json +#include +#include +#include +#include +#include +#include "assert.h" +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); +const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); +const Value::UInt Value::maxUInt = Value::UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. 
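// [Editorial note, not part of the original patch] The two assertion macros defined
// just above behave very differently: JSON_ASSERT expands to assert(), which
// disappears in NDEBUG builds, while JSON_ASSERT_MESSAGE throws std::runtime_error
// (the "@todo" beside JSON_ASSERT already flags the inconsistency). A minimal
// caller-side sketch of the throwing path, using only the Value API implemented in
// this file (assumes <stdexcept> on the caller's side):
//
//   Json::Value v( "not a number" );
//   try {
//       v.asInt();                         // stringValue -> JSON_ASSERT_MESSAGE fires
//   } catch ( std::runtime_error &e ) {
//       // e.what() == "Type is not convertible to int"
//   }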
+// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. 
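// [Editorial note, not part of the original patch] CommentInfo only owns the raw
// comment text; which slot it occupies is decided by Value::setComment() further
// down. A short usage sketch: Json::commentBefore is assumed to be one of the
// CommentPlacement enumerators declared alongside numberOfCommentPlacement in the
// value header, everything else is defined in this file:
//
//   Json::Value root;
//   root["name"] = "json";
//   root.setComment( "// library name", Json::commentBefore );   // text must start with '/'
//   bool tagged = root.hasComment( Json::commentBefore );
//   std::string text = root.getComment( Json::commentBefore );   // "" when no comment is set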
+ comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. 
+ */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < 
numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. 
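// [Editorial note, not part of the original patch] The temporary below is only the
// workaround for the GCC 2.95 bit-field complaint quoted above. The part that
// matters to callers is that operator== and operator< compare the ValueType tag
// before any payload, so values of different types never compare equal even when
// they are numerically identical. A small illustration using only this file's API:
//
//   Json::Value a( 1 );    // intValue
//   Json::Value b( 1u );   // uintValue
//   Json::Value c( 2.0 );  // realValue
//   bool eq = ( a == b );  // false: intValue vs uintValue, payloads never inspected
//   bool lt = ( a < c );   // true: ordered by ValueType first (intValue < realValue)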
+ int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? "true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
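// [Editorial note, not part of the original patch] resize() follows the same
// conventions as operator[](UInt) below: a nullValue silently becomes an array,
// and size() reports the highest used index plus one (see size() above). A short
// sketch using only the API in this file:
//
//   Json::Value arr;          // nullValue
//   arr[2u] = "c";            // becomes arrayValue; size() == 3
//   arr.append( "d" );        // same as arr[arr.size()] = "d"; size() == 4
//   arr.resize( 2 );          // drops the trailing entries; size() == 2
//   arr.resize( 0 );          // equivalent to clear()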
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
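// [Editorial note, not part of the original patch] Every error branch in makePath(),
// addPathInArg(), invalidPath() and both resolve() overloads is still an empty
// comment, so malformed or unresolvable paths fall through silently. The array check
// a few lines above also looks inverted: the error branch is entered when
// isValidIndex() succeeds, where !node->isValidIndex( arg.index_ ) was presumably
// intended. A hedged sketch of the syntax makePath() accepts ('.' separates keys,
// "[n]" is a literal index, '%' and "[%]" consume a PathArgument); root stands for
// any already-built Json::Value:
//
//   Json::Path path( ".settings.%", Json::PathArgument( "indent" ) );
//   const Json::Value &found = path.resolve( root );
//   Json::Value fallback = path.resolve( root, Json::Value( 3 ) );   // default on failure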
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index c84c47d..8da3bf7 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -1,257 +1,257 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - return difference_type( std::distance( current_, other.current_ ) ); -# 
endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -Value::UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) 
+ return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + return difference_type( std::distance( current_, other.current_ ) ); +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +Value::UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 7485e5a..c6b8738 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -1,487 +1,487 @@ -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Value::Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( Value::UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( Value::UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#ifdef __STDC_SECURE_LIB__ // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%.16g", value); -#else - sprintf(buffer, "%.16g", value); -#endif - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (std::strpbrk(value, "\"\\\b\f\n\r\t") == NULL) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c){ - switch(*c){ - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something.) 
- default: - result += *c; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? ": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void 
-StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string 
-StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledWriter writer; - sout << writer.write(root); - return sout; -} - - -} // namespace Json +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Value::Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( Value::UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( Value::UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#ifdef __STDC_SECURE_LIB__ // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%.16g", value); +#else + sprintf(buffer, "%.16g", value); +#endif + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (std::strpbrk(value, "\"\\\b\f\n\r\t") == NULL) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c){ + switch(*c){ + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something.) 
+ default: + result += *c; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? ": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void 
+StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string 
+StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledWriter writer; + sout << writer.write(root); + return sout; +} + + +} // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/lib_json.vcproj b/trunk/jsoncpp/src/lib_json/lib_json.vcproj index d1a6f05..3fa9cf0 100644 --- a/trunk/jsoncpp/src/lib_json/lib_json.vcproj +++ b/trunk/jsoncpp/src/lib_json/lib_json.vcproj @@ -1,211 +1,211 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/trunk/jsoncpp/src/lib_json/sconscript b/trunk/jsoncpp/src/lib_json/sconscript index f6520d1..6e7c6c8 100644 --- a/trunk/jsoncpp/src/lib_json/sconscript +++ b/trunk/jsoncpp/src/lib_json/sconscript @@ -1,8 +1,8 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) From 86c2f28b9cf138d13a0e9a1dd99d65d7c2912027 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 13 Jun 2007 15:51:04 +0000 Subject: [PATCH 047/268] Added prop svn:eol-style native. Stripped carriage-returns on unix. Hopefully, this will work for Windows too now. 
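Stepping back to the json_writer.cpp added above: FastWriter and StyledWriter differ mainly in layout, since FastWriter emits one compact line and drops comments while StyledWriter indents and re-emits any comments attached to the values, and operator<< simply delegates to StyledWriter. A small usage sketch (the members set on root are made up for illustration):

    #include <json/json.h>
    #include <iostream>

    int main()
    {
       Json::Value root;
       root["name"] = "jsoncpp";
       root["tests"] = 7;

       Json::FastWriter fast;            // one compact line, e.g. {"name":"jsoncpp","tests":7}
       std::cout << fast.write( root );

       Json::StyledWriter styled;        // indented, with any attached comments re-emitted
       std::cout << styled.write( root );

       std::cout << root;                // operator<< uses StyledWriter internally
       return 0;
    }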
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@47 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/autolink.h | 38 +- trunk/jsoncpp/include/json/config.h | 80 +- trunk/jsoncpp/include/json/forwards.h | 62 +- trunk/jsoncpp/include/json/json.h | 18 +- trunk/jsoncpp/include/json/reader.h | 364 ++--- trunk/jsoncpp/include/json/value.h | 2098 ++++++++++++------------- trunk/jsoncpp/include/json/writer.h | 230 +-- 7 files changed, 1445 insertions(+), 1445 deletions(-) diff --git a/trunk/jsoncpp/include/json/autolink.h b/trunk/jsoncpp/include/json/autolink.h index ef5e0ab..37c9258 100644 --- a/trunk/jsoncpp/include/json/autolink.h +++ b/trunk/jsoncpp/include/json/autolink.h @@ -1,19 +1,19 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index c1bcb3f..52de4e4 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -1,40 +1,40 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. 
+//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index 704d6e4..3372a55 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -1,31 +1,31 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - class FastWriter; - class Reader; - class StyledWriter; - - // value.h - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + class FastWriter; + class Reader; + class StyledWriter; + + // value.h + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/json.h b/trunk/jsoncpp/include/json/json.h index c2a24ea..a539740 100644 --- a/trunk/jsoncpp/include/json/json.h +++ b/trunk/jsoncpp/include/json/json.h @@ -1,9 +1,9 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" - -#endif // JSON_JSON_H_INCLUDED +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 2d39608..f1bc5a2 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -1,182 +1,182 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "forwards.h" -# include "value.h" -# include -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Unserialize a JSON document into a Value. 
- * - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - Reader(); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream&, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. - */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - 
std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "forwards.h" +# include "value.h" +# include +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Unserialize a JSON document into a Value. + * + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + Reader(); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream&, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 480f433..1eb5e6c 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -1,1049 +1,1049 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
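The Reader declared here signals failure through its bool return value and getFormatedErrorMessages(); only the stream operator>> above is documented to throw. A sketch of the resulting parse-and-check pattern:

    #include <json/json.h>
    #include <iostream>
    #include <string>

    bool parseDocument( const std::string &document, Json::Value &root )
    {
       Json::Reader reader;
       // collectComments defaults to true, so comments can be written back later.
       if ( !reader.parse( document, root ) )
       {
          // Formatted list of errors with their location in the document.
          std::cerr << reader.getFormatedErrorMessages();
          return false;
       }
       return true;
    }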
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
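The class overview just above compresses the main access idioms; spelled out as code (the member names are illustrative only):

    #include <json/json.h>

    void accessExamples( Json::Value &root )
    {
       root["server"]["port"] = 8080;    // missing object members are created as null, then assigned
       root["tags"].append( "json" );    // arrays grow on demand
       root["tags"][1u] = "cpp";         // indexing one past the end resizes with null elements

       int port = root["server"].get( "port", 80 ).asInt();   // default when the member is absent
       Json::Value::Members names = root.getMemberNames();    // here: "server", "tags"
       (void)port;
       (void)names;
    }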
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef int Int; - typedef unsigned int UInt; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. - * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. 
- const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. 
-# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( Value::UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - Value::UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. - * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). 
- * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. - * [output] pointer on the new index of at least - * \a minNewIndexCount pages. 
- * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - Value::UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. 
- */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). 
+ * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. + */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef int Int; + typedef unsigned int UInt; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. 
+ const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. 
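      // Editor's note (not part of the original patch): the tagged union and type_
      // field above are what the public accessors read from; a minimal usage sketch
      // of that public API, assuming `root` lives in a complete program:
      // \code
      // Json::Value root( Json::objectValue );
      // root["name"] = "jsoncpp";                        // member created on demand
      // root["ids"].append( 1 );                         // arrayValue member created on demand
      // Json::Value first  = root["ids"].get( 0u, Json::Value::null );
      // std::string  name  = root.get( "name", "unknown" ).asString();
      // \endcode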
+# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( Value::UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + Value::UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). 
+ * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
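      // Editor's note (not part of the original patch): with itemsPerPage == 8, the
      // look-up described in the class comment resolves, e.g., item 19 as
      // pages_[19 / 8][19 % 8] == pages_[2][3]; keeping the page size a power of two
      // lets the divide and modulo compile down to a shift and a bit-mask.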
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. 
+ * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + Value::UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. 
+ */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 5ec8641..cf0eb0c 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -1,115 +1,115 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. 
- */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Value::Int value ); - std::string JSON_API valueToString( Value::UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. 
+    */
+      virtual std::string write( const Value &root );
+
+   private:
+      void writeValue( const Value &value );
+      void writeArrayValue( const Value &value );
+      bool isMultineArray( const Value &value );
+      void pushValue( const std::string &value );
+      void writeIndent();
+      void writeWithIndent( const std::string &value );
+      void indent();
+      void unindent();
+      void writeCommentBeforeValue( const Value &root );
+      void writeCommentAfterValueOnSameLine( const Value &root );
+      bool hasCommentForValue( const Value &value );
+      static std::string normalizeEOL( const std::string &text );
+
+      typedef std::vector<std::string> ChildValues;
+
+      ChildValues childValues_;
+      std::string document_;
+      std::string indentString_;
+      int rightMargin_;
+      int indentSize_;
+      bool addChildValues_;
+   };
+
+   std::string JSON_API valueToString( Value::Int value );
+   std::string JSON_API valueToString( Value::UInt value );
+   std::string JSON_API valueToString( double value );
+   std::string JSON_API valueToString( bool value );
+   std::string JSON_API valueToQuotedString( const char *value );
+
+   /// \brief Output using the StyledWriter.
+   /// \see Json::operator>>()
+   std::ostream& operator<<( std::ostream&, const Value &root );
+
+} // namespace Json
+
+
+
+#endif // JSON_WRITER_H_INCLUDED

From b8038b476b6ff851351074cb0fb5d421cedf4d1d Mon Sep 17 00:00:00 2001
From: christopherdunn
Date: Wed, 13 Jun 2007 15:52:48 +0000
Subject: [PATCH 048/268] Added prop svn:eol-style native. Stripped
 carriage-returns on unix. Hopefully, this will work for Windows too now.

git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@48 1f120ed1-78a5-a849-adca-83f0a9e25bb6

From 82f94582fe0774cf14ea1cc377a681a76f182e81 Mon Sep 17 00:00:00 2001
From: christopherdunn
Date: Wed, 13 Jun 2007 15:55:50 +0000
Subject: [PATCH 049/268] Added StyledStreamWriter, which has no reason to
 derive from Writer, since its write() method cannot return a string and must
 take a stream.

git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@49 1f120ed1-78a5-a849-adca-83f0a9e25bb6
---
 trunk/jsoncpp/include/json/writer.h        |  63 ++++-
 trunk/jsoncpp/src/lib_json/json_writer.cpp | 285 ++++++++++++++++++++-
 2 files changed, 344 insertions(+), 4 deletions(-)

diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h
index cf0eb0c..cfa92c6 100644
--- a/trunk/jsoncpp/include/json/writer.h
+++ b/trunk/jsoncpp/include/json/writer.h
@@ -30,6 +30,7 @@ namespace Json {
    {
    public:
       FastWriter();
+      virtual ~FastWriter(){}
 
       void enableYAMLCompatibility();
 
@@ -61,7 +62,7 @@ namespace Json {
     *
     * \sa Reader, Value, Value::setComment()
     */
-   class JSON_API StyledWriter
+   class JSON_API StyledWriter: public Writer
    {
    public:
       StyledWriter();
@@ -98,13 +99,71 @@ namespace Json {
       bool addChildValues_;
    };
 
+   /** \brief Writes a Value in JSON format in a human friendly way,
+        to a stream rather than to a string.
+    *
+    * The rules for line break and indent are as follows:
+    * - Object value:
+    *     - if empty then print {} without indent and line break
+    *     - if not empty then print '{', line break & indent, print one value per line
+    *       and then unindent and line break and print '}'.
+    * - Array value:
+    *     - if empty then print [] without indent and line break
+    *     - if the array contains no object value, empty array or some other value types,
+    *       and all the values fit on one line, then print the array on a single line.
+    *     - otherwise, if the values do not fit on one line, or the array contains
+    *       object or non empty array, then print one value per line.
+ * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. + */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + std::string JSON_API valueToString( Value::Int value ); std::string JSON_API valueToString( Value::UInt value ); std::string JSON_API valueToString( double value ); std::string JSON_API valueToString( bool value ); std::string JSON_API valueToQuotedString( const char *value ); - /// \brief Output using the StyledWriter. + /// \brief Output using the StyledStreamWriter. /// \see Json::operator>>() std::ostream& operator<<( std::ostream&, const Value &root ); diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index c6b8738..b770c1e 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -476,10 +476,291 @@ StyledWriter::normalizeEOL( const std::string &text ) return normalized; } + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + std::ostream& operator<<( std::ostream &sout, const Value &root ) { - Json::StyledWriter writer; - sout << writer.write(root); + Json::StyledStreamWriter writer; + writer.write(sout, root); return sout; } From 7cc3a132035916c7af4ca62943c109e2584f7d4d Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 13 Jun 2007 17:02:01 +0000 Subject: [PATCH 050/268] Indentation is not perfect, but pretty good. Not sure how to perfect, given the difficulty of testing the last char written. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@50 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index b770c1e..500b322 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -672,7 +672,7 @@ StyledStreamWriter::writeIndent() *document_ << '\n'; } */ - *document_ << indentString_; + *document_ << '\n' << indentString_; } From 7afad2e5287634bbb80070e364811c299df1f4c5 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 13 Jun 2007 17:02:59 +0000 Subject: [PATCH 051/268] I always have compiler problems when using operator[](unsigned) when I pass int instead, so I added a note to the comments. 
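For illustration only, not part of any patch in this series: a minimal sketch of the overload ambiguity described above. Only Json::Value and its operator[] overloads are taken from value.h as shown in this series; the header path and the surrounding code are assumed.

   #include <json/value.h>

   void indexExample()
   {
      Json::Value root;
      // A bare literal 0 is an int: some compilers report an ambiguity between
      // operator[]( UInt ) and operator[]( const char * ), since both require a
      // conversion. Writing 0u selects the UInt (array index) overload directly.
      root["plug-ins"][0u] = "python";
      const Json::Value &first = root["plug-ins"][0u];
      (void)first;   // silence unused-variable warnings in this sketch
   }
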
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@51 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 1eb5e6c..3884b08 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -271,8 +271,12 @@ namespace Json { /// Access an array element (zero based index ). /// If the array contains less than index element, then null value are inserted /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) Value &operator[]( UInt index ); /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) const Value &operator[]( UInt index ) const; /// If the array contains at least index+1 elements, returns the element value, /// otherwise returns defaultValue. From 25e915d545ad023e4239fa4980efe6cf51ff8b91 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 17:58:59 +0000 Subject: [PATCH 052/268] Added svn:eol-style native git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@52 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 348 ++++++++++++------------- trunk/jsoncpp/doc/doxyfile.in | 464 +++++++++++++++++----------------- trunk/jsoncpp/doc/footer.html | 46 ++-- trunk/jsoncpp/doc/header.html | 48 ++-- trunk/jsoncpp/doc/jsoncpp.dox | 194 +++++++------- trunk/jsoncpp/doc/readme.txt | 2 +- trunk/jsoncpp/doc/sconscript | 44 ++-- 7 files changed, 573 insertions(+), 573 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index b4cba59..e32ed90 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -1,174 +1,174 @@ -import os -import os.path -import sys - -JSONCPP_VERSION = '0.1' -DIST_DIR = '#dist' - -options = Options() -options.Add( EnumOption('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -env = Environment( ENV = {'PATH' : os.environ['PATH']}, - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( LIBS = ['pthreads'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform == 'linux-gcc': - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('doxygen') -env.Tool('substinfile') -env.Tool('targz') -env.Tool('srcdist') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) -env['SRCDIST_BUILDER'] = env.TarGz - -env_testing = env.Copy( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Copy() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( 
target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source), jsontest_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source - -##def buildDoc( doxyfile_path ): -## doc_cmd = env.Doxygen( doxyfile_path ) - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'doc' ) +import os +import os.path +import sys + +JSONCPP_VERSION = '0.1' +DIST_DIR = '#dist' + +options = Options() +options.Add( EnumOption('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +env = Environment( ENV = {'PATH' : os.environ['PATH']}, + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( LIBS = ['pthreads'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform == 'linux-gcc': + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('doxygen') +env.Tool('substinfile') +env.Tool('targz') +env.Tool('srcdist') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) +env['SRCDIST_BUILDER'] = env.TarGz + +env_testing = env.Copy( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Copy() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( 
target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source), jsontest_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source + +##def buildDoc( doxyfile_path ): +## doc_cmd = env.Doxygen( doxyfile_path ) + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'doc' ) diff --git a/trunk/jsoncpp/doc/doxyfile.in b/trunk/jsoncpp/doc/doxyfile.in index 15ec5bd..f19f037 100644 --- a/trunk/jsoncpp/doc/doxyfile.in +++ b/trunk/jsoncpp/doc/doxyfile.in @@ -1,232 +1,232 @@ -# Doxyfile 1.4.3 - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- -PROJECT_NAME = "JsonCpp" -PROJECT_NUMBER = %JSONCPP_VERSION% -OUTPUT_DIRECTORY = %DOC_TOPDIR% -CREATE_SUBDIRS = NO -OUTPUT_LANGUAGE = English -USE_WINDOWS_ENCODING = NO -BRIEF_MEMBER_DESC = YES -REPEAT_BRIEF = YES -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the -ALWAYS_DETAILED_SEC = NO -INLINE_INHERITED_MEMB = NO -FULL_PATH_NAMES = YES -STRIP_FROM_PATH = %TOPDIR% -STRIP_FROM_INC_PATH = %TOPDIR%/include -SHORT_NAMES = NO -JAVADOC_AUTOBRIEF = NO -MULTILINE_CPP_IS_BRIEF = NO -DETAILS_AT_TOP = NO -INHERIT_DOCS = YES -DISTRIBUTE_GROUP_DOC = NO -SEPARATE_MEMBER_PAGES = NO -TAB_SIZE = 3 -ALIASES = -OPTIMIZE_OUTPUT_FOR_C = NO -OPTIMIZE_OUTPUT_JAVA = NO -SUBGROUPING = YES -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- -EXTRACT_ALL = YES -EXTRACT_PRIVATE = NO -EXTRACT_STATIC = YES -EXTRACT_LOCAL_CLASSES = NO -EXTRACT_LOCAL_METHODS = NO -HIDE_UNDOC_MEMBERS = NO -HIDE_UNDOC_CLASSES = NO -HIDE_FRIEND_COMPOUNDS = NO -HIDE_IN_BODY_DOCS = NO -INTERNAL_DOCS = YES -CASE_SENSE_NAMES = NO -HIDE_SCOPE_NAMES = NO -SHOW_INCLUDE_FILES = YES -INLINE_INFO = YES -SORT_MEMBER_DOCS = YES -SORT_BRIEF_DOCS = NO -SORT_BY_SCOPE_NAME = NO -GENERATE_TODOLIST = YES -GENERATE_TESTLIST = YES -GENERATE_BUGLIST = YES -GENERATE_DEPRECATEDLIST= YES -ENABLED_SECTIONS = -MAX_INITIALIZER_LINES = 30 -SHOW_USED_FILES = YES -SHOW_DIRECTORIES = YES -FILE_VERSION_FILTER = -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- -QUIET = NO -WARNINGS = YES -WARN_IF_UNDOCUMENTED = YES -WARN_IF_DOC_ERROR = YES -WARN_NO_PARAMDOC = NO -WARN_FORMAT = "$file:$line: $text" -WARN_LOGFILE = jsoncpp-doxygen-warning.log -#--------------------------------------------------------------------------- -# configuration options related to the input files 
-#--------------------------------------------------------------------------- -INPUT = ../include ../src/lib_json . -FILE_PATTERNS = *.h *.cpp *.dox -RECURSIVE = YES -EXCLUDE = -EXCLUDE_SYMLINKS = NO -EXCLUDE_PATTERNS = -EXAMPLE_PATH = -EXAMPLE_PATTERNS = * -EXAMPLE_RECURSIVE = NO -IMAGE_PATH = -INPUT_FILTER = -FILTER_PATTERNS = -FILTER_SOURCE_FILES = NO -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- -SOURCE_BROWSER = YES -INLINE_SOURCES = NO -STRIP_CODE_COMMENTS = YES -REFERENCED_BY_RELATION = YES -REFERENCES_RELATION = YES -USE_HTAGS = NO -VERBATIM_HEADERS = YES -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- -ALPHABETICAL_INDEX = NO -COLS_IN_ALPHA_INDEX = 5 -IGNORE_PREFIX = -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- -GENERATE_HTML = YES -HTML_OUTPUT = json-html-doc-%JSONCPP_VERSION% -HTML_FILE_EXTENSION = .html -HTML_HEADER = header.html -HTML_FOOTER = footer.html -HTML_STYLESHEET = -HTML_ALIGN_MEMBERS = YES -GENERATE_HTMLHELP = NO -CHM_FILE = jsoncpp.chm -HHC_LOCATION = -GENERATE_CHI = NO -BINARY_TOC = NO -TOC_EXPAND = NO -DISABLE_INDEX = NO -ENUM_VALUES_PER_LINE = 4 -GENERATE_TREEVIEW = NO -TREEVIEW_WIDTH = 250 -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- -GENERATE_LATEX = NO -LATEX_OUTPUT = latex -LATEX_CMD_NAME = latex -MAKEINDEX_CMD_NAME = makeindex -COMPACT_LATEX = NO -PAPER_TYPE = a4wide -EXTRA_PACKAGES = -LATEX_HEADER = -PDF_HYPERLINKS = NO -USE_PDFLATEX = NO -LATEX_BATCHMODE = NO -LATEX_HIDE_INDICES = NO -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- -GENERATE_RTF = NO -RTF_OUTPUT = rtf -COMPACT_RTF = NO -RTF_HYPERLINKS = NO -RTF_STYLESHEET_FILE = -RTF_EXTENSIONS_FILE = -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- -GENERATE_MAN = NO -MAN_OUTPUT = man -MAN_EXTENSION = .3 -MAN_LINKS = NO -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- -GENERATE_XML = NO -XML_OUTPUT = xml -XML_SCHEMA = -XML_DTD = -XML_PROGRAMLISTING = YES -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- -GENERATE_AUTOGEN_DEF = NO -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- -GENERATE_PERLMOD = NO -PERLMOD_LATEX = NO -PERLMOD_PRETTY = YES 
-PERLMOD_MAKEVAR_PREFIX = -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- -ENABLE_PREPROCESSING = YES -MACRO_EXPANSION = NO -EXPAND_ONLY_PREDEF = NO -SEARCH_INCLUDES = YES -INCLUDE_PATH = ../include -INCLUDE_FILE_PATTERNS = *.h -PREDEFINED = JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP -EXPAND_AS_DEFINED = -SKIP_FUNCTION_MACROS = YES -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- -TAGFILES = -GENERATE_TAGFILE = -ALLEXTERNALS = NO -EXTERNAL_GROUPS = YES -PERL_PATH = /usr/bin/perl -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- -CLASS_DIAGRAMS = NO -HIDE_UNDOC_RELATIONS = YES -HAVE_DOT = NO -CLASS_GRAPH = YES -COLLABORATION_GRAPH = YES -GROUP_GRAPHS = YES -UML_LOOK = NO -TEMPLATE_RELATIONS = NO -INCLUDE_GRAPH = YES -INCLUDED_BY_GRAPH = YES -CALL_GRAPH = NO -GRAPHICAL_HIERARCHY = YES -DIRECTORY_GRAPH = YES -DOT_IMAGE_FORMAT = png -DOT_PATH = -DOTFILE_DIRS = -MAX_DOT_GRAPH_WIDTH = 1024 -MAX_DOT_GRAPH_HEIGHT = 1024 -MAX_DOT_GRAPH_DEPTH = 1000 -DOT_TRANSPARENT = NO -DOT_MULTI_TARGETS = NO -GENERATE_LEGEND = YES -DOT_CLEANUP = YES -#--------------------------------------------------------------------------- -# Configuration::additions related to the search engine -#--------------------------------------------------------------------------- -SEARCHENGINE = NO +# Doxyfile 1.4.3 + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- +PROJECT_NAME = "JsonCpp" +PROJECT_NUMBER = %JSONCPP_VERSION% +OUTPUT_DIRECTORY = %DOC_TOPDIR% +CREATE_SUBDIRS = NO +OUTPUT_LANGUAGE = English +USE_WINDOWS_ENCODING = NO +BRIEF_MEMBER_DESC = YES +REPEAT_BRIEF = YES +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the +ALWAYS_DETAILED_SEC = NO +INLINE_INHERITED_MEMB = NO +FULL_PATH_NAMES = YES +STRIP_FROM_PATH = %TOPDIR% +STRIP_FROM_INC_PATH = %TOPDIR%/include +SHORT_NAMES = NO +JAVADOC_AUTOBRIEF = NO +MULTILINE_CPP_IS_BRIEF = NO +DETAILS_AT_TOP = NO +INHERIT_DOCS = YES +DISTRIBUTE_GROUP_DOC = NO +SEPARATE_MEMBER_PAGES = NO +TAB_SIZE = 3 +ALIASES = +OPTIMIZE_OUTPUT_FOR_C = NO +OPTIMIZE_OUTPUT_JAVA = NO +SUBGROUPING = YES +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- +EXTRACT_ALL = YES +EXTRACT_PRIVATE = NO +EXTRACT_STATIC = YES +EXTRACT_LOCAL_CLASSES = NO +EXTRACT_LOCAL_METHODS = NO +HIDE_UNDOC_MEMBERS = NO +HIDE_UNDOC_CLASSES = NO +HIDE_FRIEND_COMPOUNDS = NO +HIDE_IN_BODY_DOCS = NO +INTERNAL_DOCS = YES +CASE_SENSE_NAMES = NO +HIDE_SCOPE_NAMES = NO +SHOW_INCLUDE_FILES = YES +INLINE_INFO = YES +SORT_MEMBER_DOCS = YES +SORT_BRIEF_DOCS = NO +SORT_BY_SCOPE_NAME = NO +GENERATE_TODOLIST = YES +GENERATE_TESTLIST = YES +GENERATE_BUGLIST = YES +GENERATE_DEPRECATEDLIST= YES +ENABLED_SECTIONS = +MAX_INITIALIZER_LINES = 30 +SHOW_USED_FILES = 
YES +SHOW_DIRECTORIES = YES +FILE_VERSION_FILTER = +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- +QUIET = NO +WARNINGS = YES +WARN_IF_UNDOCUMENTED = YES +WARN_IF_DOC_ERROR = YES +WARN_NO_PARAMDOC = NO +WARN_FORMAT = "$file:$line: $text" +WARN_LOGFILE = jsoncpp-doxygen-warning.log +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- +INPUT = ../include ../src/lib_json . +FILE_PATTERNS = *.h *.cpp *.dox +RECURSIVE = YES +EXCLUDE = +EXCLUDE_SYMLINKS = NO +EXCLUDE_PATTERNS = +EXAMPLE_PATH = +EXAMPLE_PATTERNS = * +EXAMPLE_RECURSIVE = NO +IMAGE_PATH = +INPUT_FILTER = +FILTER_PATTERNS = +FILTER_SOURCE_FILES = NO +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- +SOURCE_BROWSER = YES +INLINE_SOURCES = NO +STRIP_CODE_COMMENTS = YES +REFERENCED_BY_RELATION = YES +REFERENCES_RELATION = YES +USE_HTAGS = NO +VERBATIM_HEADERS = YES +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- +ALPHABETICAL_INDEX = NO +COLS_IN_ALPHA_INDEX = 5 +IGNORE_PREFIX = +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- +GENERATE_HTML = YES +HTML_OUTPUT = json-html-doc-%JSONCPP_VERSION% +HTML_FILE_EXTENSION = .html +HTML_HEADER = header.html +HTML_FOOTER = footer.html +HTML_STYLESHEET = +HTML_ALIGN_MEMBERS = YES +GENERATE_HTMLHELP = NO +CHM_FILE = jsoncpp.chm +HHC_LOCATION = +GENERATE_CHI = NO +BINARY_TOC = NO +TOC_EXPAND = NO +DISABLE_INDEX = NO +ENUM_VALUES_PER_LINE = 4 +GENERATE_TREEVIEW = NO +TREEVIEW_WIDTH = 250 +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- +GENERATE_LATEX = NO +LATEX_OUTPUT = latex +LATEX_CMD_NAME = latex +MAKEINDEX_CMD_NAME = makeindex +COMPACT_LATEX = NO +PAPER_TYPE = a4wide +EXTRA_PACKAGES = +LATEX_HEADER = +PDF_HYPERLINKS = NO +USE_PDFLATEX = NO +LATEX_BATCHMODE = NO +LATEX_HIDE_INDICES = NO +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- +GENERATE_RTF = NO +RTF_OUTPUT = rtf +COMPACT_RTF = NO +RTF_HYPERLINKS = NO +RTF_STYLESHEET_FILE = +RTF_EXTENSIONS_FILE = +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- +GENERATE_MAN = NO +MAN_OUTPUT = man +MAN_EXTENSION = .3 +MAN_LINKS = NO +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- +GENERATE_XML = NO +XML_OUTPUT = xml 
+XML_SCHEMA = +XML_DTD = +XML_PROGRAMLISTING = YES +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- +GENERATE_AUTOGEN_DEF = NO +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- +GENERATE_PERLMOD = NO +PERLMOD_LATEX = NO +PERLMOD_PRETTY = YES +PERLMOD_MAKEVAR_PREFIX = +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- +ENABLE_PREPROCESSING = YES +MACRO_EXPANSION = NO +EXPAND_ONLY_PREDEF = NO +SEARCH_INCLUDES = YES +INCLUDE_PATH = ../include +INCLUDE_FILE_PATTERNS = *.h +PREDEFINED = JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP +EXPAND_AS_DEFINED = +SKIP_FUNCTION_MACROS = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- +TAGFILES = +GENERATE_TAGFILE = +ALLEXTERNALS = NO +EXTERNAL_GROUPS = YES +PERL_PATH = /usr/bin/perl +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- +CLASS_DIAGRAMS = NO +HIDE_UNDOC_RELATIONS = YES +HAVE_DOT = NO +CLASS_GRAPH = YES +COLLABORATION_GRAPH = YES +GROUP_GRAPHS = YES +UML_LOOK = NO +TEMPLATE_RELATIONS = NO +INCLUDE_GRAPH = YES +INCLUDED_BY_GRAPH = YES +CALL_GRAPH = NO +GRAPHICAL_HIERARCHY = YES +DIRECTORY_GRAPH = YES +DOT_IMAGE_FORMAT = png +DOT_PATH = +DOTFILE_DIRS = +MAX_DOT_GRAPH_WIDTH = 1024 +MAX_DOT_GRAPH_HEIGHT = 1024 +MAX_DOT_GRAPH_DEPTH = 1000 +DOT_TRANSPARENT = NO +DOT_MULTI_TARGETS = NO +GENERATE_LEGEND = YES +DOT_CLEANUP = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to the search engine +#--------------------------------------------------------------------------- +SEARCHENGINE = NO diff --git a/trunk/jsoncpp/doc/footer.html b/trunk/jsoncpp/doc/footer.html index 56df7a4..a61d952 100644 --- a/trunk/jsoncpp/doc/footer.html +++ b/trunk/jsoncpp/doc/footer.html @@ -1,23 +1,23 @@ -
   [doc/footer.html hunk, garbled in this extract: the HTML markup was stripped, leaving only bare diff markers and the visible text ("SourceForge Logo", "hosts this site.", "Send comments to: Json-cpp Developers"), identical on the removed and added sides. The hunk only rewrites the file with native line endings.]
+ + + diff --git a/trunk/jsoncpp/doc/header.html b/trunk/jsoncpp/doc/header.html index 2288b04..d56ea59 100644 --- a/trunk/jsoncpp/doc/header.html +++ b/trunk/jsoncpp/doc/header.html @@ -1,24 +1,24 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
   [doc/header.html hunk, likewise stripped of its markup: the surviving text is the page title "JsonCpp - JSON data format manipulation library" and the links "JsonCpp project page" and "JsonCpp home page", identical on both sides of the hunk; the change is again a line-ending rewrite.]
diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 3667fa6..34dda5e 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -1,97 +1,97 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - indent : { length : 3, use_space = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represents integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. 
+ +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + indent : { length : 3, use_space = true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- rewrite JSON document preserving original comments + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormatedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitely construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _plinks Build instructions +The build instruction are located in the file +README.txt in the top-directory of the project. + +Permanent link to the lastest revision of the file in subversion: +lastest README.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +The json-cpp library and this documentation are in Public Domain. + +\author Baptiste Lepilleur +*/ diff --git a/trunk/jsoncpp/doc/readme.txt b/trunk/jsoncpp/doc/readme.txt index 499422e..0e42cdf 100644 --- a/trunk/jsoncpp/doc/readme.txt +++ b/trunk/jsoncpp/doc/readme.txt @@ -1 +1 @@ -The documentation is generated using doxygen (http://www.doxygen.org). +The documentation is generated using doxygen (http://www.doxygen.org). 
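For illustration only, not part of any patch in this series: a minimal sketch of the StyledStreamWriter added in r49, using only the interface shown in writer.h above (the constructor taking an indentation string, and write( std::ostream &, const Value & )); the header paths are assumed from the include layout in this series.

   #include <json/value.h>
   #include <json/writer.h>
   #include <iostream>
   #include <sstream>

   int main()
   {
      Json::Value root;
      root["encoding"] = "UTF-8";
      root["indent"]["length"] = 3;
      root["indent"]["use_space"] = true;

      // Write the styled document straight to a stream; unlike StyledWriter,
      // no intermediate std::string is built.
      Json::StyledStreamWriter writer( "   " );   // indent with three spaces
      writer.write( std::cout, root );

      // Json::operator<<() now routes through StyledStreamWriter as well,
      // so streaming a Value produces the same styled output.
      std::ostringstream out;
      out << root;
      return 0;
   }
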
diff --git a/trunk/jsoncpp/doc/sconscript b/trunk/jsoncpp/doc/sconscript index d2e27a7..dc29320 100644 --- a/trunk/jsoncpp/doc/sconscript +++ b/trunk/jsoncpp/doc/sconscript @@ -1,22 +1,22 @@ -Import( 'env' ) -import os.path - -if 'doxygen' in env['TOOLS']: - doc_topdir = env['ROOTBUILD_DIR'] - doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', - SUBST_DICT = { - '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'], - '%TOPDIR%' : env.Dir('#').abspath, - '%DOC_TOPDIR%' : str(doc_topdir) } ) - doc_cmd = env.Doxygen( doxyfile ) - alias_doc_cmd = env.Alias('doc', doc_cmd ) - env.AlwaysBuild(alias_doc_cmd) - - for dir in doc_cmd: - env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) ) - filename = os.path.split(dir.path)[1] - targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) - zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], - TARGZ_BASEDIR = doc_topdir ) - env.Depends( zip_doc_cmd, alias_doc_cmd ) - env.Alias( 'doc-dist', zip_doc_cmd ) +Import( 'env' ) +import os.path + +if 'doxygen' in env['TOOLS']: + doc_topdir = env['ROOTBUILD_DIR'] + doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', + SUBST_DICT = { + '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'], + '%TOPDIR%' : env.Dir('#').abspath, + '%DOC_TOPDIR%' : str(doc_topdir) } ) + doc_cmd = env.Doxygen( doxyfile ) + alias_doc_cmd = env.Alias('doc', doc_cmd ) + env.AlwaysBuild(alias_doc_cmd) + + for dir in doc_cmd: + env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) ) + filename = os.path.split(dir.path)[1] + targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) + zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], + TARGZ_BASEDIR = doc_topdir ) + env.Depends( zip_doc_cmd, alias_doc_cmd ) + env.Alias( 'doc-dist', zip_doc_cmd ) From 0396ea869870d6d1699ba417a8c7018df96cee76 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 19:32:40 +0000 Subject: [PATCH 053/268] LD_LIBRARY_PATH needed for linux build tests. (Really, passing -Wl,-rpath to the linker would be better, but scons is not good about automating that.) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@53 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 2 ++ 1 file changed, 2 insertions(+) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index e32ed90..d0dfd80 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -77,6 +77,8 @@ elif platform == 'mingw': elif platform == 'linux-gcc': env.Tool( 'default' ) env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + os.environ['LD_LIBRARY_PATH'] = "%s:libs/linux-gcc" %LD_LIBRARY_PATH else: print "UNSUPPORTED PLATFORM." env.Exit(1) From 4ac094b26ed3aca02bae7d0c8250596742cc0f68 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 19:41:21 +0000 Subject: [PATCH 054/268] Without this file, scons must be run twice initially. Hopefully, the file gets rebuilt properly, as I do not know how to fix the scons rule correctly. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@54 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/doxyfile | 232 +++++++++++++++++++++++++++++++++++++ 1 file changed, 232 insertions(+) create mode 100644 trunk/jsoncpp/doc/doxyfile diff --git a/trunk/jsoncpp/doc/doxyfile b/trunk/jsoncpp/doc/doxyfile new file mode 100644 index 0000000..982347c --- /dev/null +++ b/trunk/jsoncpp/doc/doxyfile @@ -0,0 +1,232 @@ +# Doxyfile 1.4.3 + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- +PROJECT_NAME = "JsonCpp" +PROJECT_NUMBER = 0.1 +OUTPUT_DIRECTORY = /tmp/cdunn/www/jsoncpp/trunk/jsoncpp/buildscons +CREATE_SUBDIRS = NO +OUTPUT_LANGUAGE = English +USE_WINDOWS_ENCODING = NO +BRIEF_MEMBER_DESC = YES +REPEAT_BRIEF = YES +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the +ALWAYS_DETAILED_SEC = NO +INLINE_INHERITED_MEMB = NO +FULL_PATH_NAMES = YES +STRIP_FROM_PATH = /tmp/cdunn/www/jsoncpp/trunk/jsoncpp +STRIP_FROM_INC_PATH = /tmp/cdunn/www/jsoncpp/trunk/jsoncpp/include +SHORT_NAMES = NO +JAVADOC_AUTOBRIEF = NO +MULTILINE_CPP_IS_BRIEF = NO +DETAILS_AT_TOP = NO +INHERIT_DOCS = YES +DISTRIBUTE_GROUP_DOC = NO +SEPARATE_MEMBER_PAGES = NO +TAB_SIZE = 3 +ALIASES = +OPTIMIZE_OUTPUT_FOR_C = NO +OPTIMIZE_OUTPUT_JAVA = NO +SUBGROUPING = YES +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- +EXTRACT_ALL = YES +EXTRACT_PRIVATE = NO +EXTRACT_STATIC = YES +EXTRACT_LOCAL_CLASSES = NO +EXTRACT_LOCAL_METHODS = NO +HIDE_UNDOC_MEMBERS = NO +HIDE_UNDOC_CLASSES = NO +HIDE_FRIEND_COMPOUNDS = NO +HIDE_IN_BODY_DOCS = NO +INTERNAL_DOCS = YES +CASE_SENSE_NAMES = NO +HIDE_SCOPE_NAMES = NO +SHOW_INCLUDE_FILES = YES +INLINE_INFO = YES +SORT_MEMBER_DOCS = YES +SORT_BRIEF_DOCS = NO +SORT_BY_SCOPE_NAME = NO +GENERATE_TODOLIST = YES +GENERATE_TESTLIST = YES +GENERATE_BUGLIST = YES +GENERATE_DEPRECATEDLIST= YES +ENABLED_SECTIONS = +MAX_INITIALIZER_LINES = 30 +SHOW_USED_FILES = YES +SHOW_DIRECTORIES = YES +FILE_VERSION_FILTER = +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- +QUIET = NO +WARNINGS = YES +WARN_IF_UNDOCUMENTED = YES +WARN_IF_DOC_ERROR = YES +WARN_NO_PARAMDOC = NO +WARN_FORMAT = "$file:$line: $text" +WARN_LOGFILE = jsoncpp-doxygen-warning.log +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- +INPUT = ../include ../src/lib_json . 
+FILE_PATTERNS = *.h *.cpp *.dox +RECURSIVE = YES +EXCLUDE = +EXCLUDE_SYMLINKS = NO +EXCLUDE_PATTERNS = +EXAMPLE_PATH = +EXAMPLE_PATTERNS = * +EXAMPLE_RECURSIVE = NO +IMAGE_PATH = +INPUT_FILTER = +FILTER_PATTERNS = +FILTER_SOURCE_FILES = NO +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- +SOURCE_BROWSER = YES +INLINE_SOURCES = NO +STRIP_CODE_COMMENTS = YES +REFERENCED_BY_RELATION = YES +REFERENCES_RELATION = YES +USE_HTAGS = NO +VERBATIM_HEADERS = YES +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- +ALPHABETICAL_INDEX = NO +COLS_IN_ALPHA_INDEX = 5 +IGNORE_PREFIX = +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- +GENERATE_HTML = YES +HTML_OUTPUT = json-html-doc-0.1 +HTML_FILE_EXTENSION = .html +HTML_HEADER = header.html +HTML_FOOTER = footer.html +HTML_STYLESHEET = +HTML_ALIGN_MEMBERS = YES +GENERATE_HTMLHELP = NO +CHM_FILE = jsoncpp.chm +HHC_LOCATION = +GENERATE_CHI = NO +BINARY_TOC = NO +TOC_EXPAND = NO +DISABLE_INDEX = NO +ENUM_VALUES_PER_LINE = 4 +GENERATE_TREEVIEW = NO +TREEVIEW_WIDTH = 250 +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- +GENERATE_LATEX = NO +LATEX_OUTPUT = latex +LATEX_CMD_NAME = latex +MAKEINDEX_CMD_NAME = makeindex +COMPACT_LATEX = NO +PAPER_TYPE = a4wide +EXTRA_PACKAGES = +LATEX_HEADER = +PDF_HYPERLINKS = NO +USE_PDFLATEX = NO +LATEX_BATCHMODE = NO +LATEX_HIDE_INDICES = NO +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- +GENERATE_RTF = NO +RTF_OUTPUT = rtf +COMPACT_RTF = NO +RTF_HYPERLINKS = NO +RTF_STYLESHEET_FILE = +RTF_EXTENSIONS_FILE = +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- +GENERATE_MAN = NO +MAN_OUTPUT = man +MAN_EXTENSION = .3 +MAN_LINKS = NO +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- +GENERATE_XML = NO +XML_OUTPUT = xml +XML_SCHEMA = +XML_DTD = +XML_PROGRAMLISTING = YES +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- +GENERATE_AUTOGEN_DEF = NO +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- +GENERATE_PERLMOD = NO +PERLMOD_LATEX = NO +PERLMOD_PRETTY = YES +PERLMOD_MAKEVAR_PREFIX = +#--------------------------------------------------------------------------- +# Configuration options related to the 
preprocessor +#--------------------------------------------------------------------------- +ENABLE_PREPROCESSING = YES +MACRO_EXPANSION = NO +EXPAND_ONLY_PREDEF = NO +SEARCH_INCLUDES = YES +INCLUDE_PATH = ../include +INCLUDE_FILE_PATTERNS = *.h +PREDEFINED = JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP +EXPAND_AS_DEFINED = +SKIP_FUNCTION_MACROS = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- +TAGFILES = +GENERATE_TAGFILE = +ALLEXTERNALS = NO +EXTERNAL_GROUPS = YES +PERL_PATH = /usr/bin/perl +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- +CLASS_DIAGRAMS = NO +HIDE_UNDOC_RELATIONS = YES +HAVE_DOT = NO +CLASS_GRAPH = YES +COLLABORATION_GRAPH = YES +GROUP_GRAPHS = YES +UML_LOOK = NO +TEMPLATE_RELATIONS = NO +INCLUDE_GRAPH = YES +INCLUDED_BY_GRAPH = YES +CALL_GRAPH = NO +GRAPHICAL_HIERARCHY = YES +DIRECTORY_GRAPH = YES +DOT_IMAGE_FORMAT = png +DOT_PATH = +DOTFILE_DIRS = +MAX_DOT_GRAPH_WIDTH = 1024 +MAX_DOT_GRAPH_HEIGHT = 1024 +MAX_DOT_GRAPH_DEPTH = 1000 +DOT_TRANSPARENT = NO +DOT_MULTI_TARGETS = NO +GENERATE_LEGEND = YES +DOT_CLEANUP = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to the search engine +#--------------------------------------------------------------------------- +SEARCHENGINE = NO From 9b8814f204e2a746a2f07d8b8455a591fb52e98c Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 20:59:51 +0000 Subject: [PATCH 055/268] Updated platform on linux-gcc to include compiler version (necessary for multiple targets built in same directory, and not a bad idea for other builds). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@55 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index d0dfd80..8167484 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -14,6 +14,16 @@ options.Add( EnumOption('platform', try: platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH except KeyError: print 'You must specify a "platform"' sys.exit(2) @@ -74,11 +84,9 @@ elif platform == 'msvc80': elif platform == 'mingw': env.Tool( 'mingw' ) env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform == 'linux-gcc': +elif platform.startswith('linux-gcc'): env.Tool( 'default' ) env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - os.environ['LD_LIBRARY_PATH'] = "%s:libs/linux-gcc" %LD_LIBRARY_PATH else: print "UNSUPPORTED PLATFORM." 
env.Exit(1) @@ -174,3 +182,9 @@ env.Alias( 'src-dist', srcdist_cmd ) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) buildProjectInDirectory( 'doc' ) + +# libs was happening before bin by chance, I think. When I added +# the compiler version to linux-gcc, the order changed. This +# fixes it (I believe). +env.Depends('bin', 'libs') +env.Depends('check', 'bin') From ef028585f9c1b5c23c6f96d0e43e1ebb1db799b3 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 21:01:26 +0000 Subject: [PATCH 056/268] Stripped carriage return and added eol-style native prop. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@56 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/AUTHORS | 2 +- trunk/jsoncpp/README.txt | 88 ++-- trunk/jsoncpp/scons-tools/doxygen.py | 410 +++++++++--------- trunk/jsoncpp/scons-tools/srcdist.py | 358 +++++++-------- trunk/jsoncpp/scons-tools/substinfile.py | 158 +++---- trunk/jsoncpp/scons-tools/targz.py | 70 +-- trunk/jsoncpp/src/jsontestrunner/main.cpp | 374 ++++++++-------- trunk/jsoncpp/src/jsontestrunner/sconscript | 12 +- trunk/jsoncpp/test/cleantests.py | 20 +- trunk/jsoncpp/test/generate_expected.py | 22 +- trunk/jsoncpp/test/jsontestrunner.py | 128 +++--- trunk/jsoncpp/test/runjsontests.py | 180 ++++---- trunk/jsoncpp/test/test_array_01.expected | 2 +- trunk/jsoncpp/test/test_array_01.json | 2 +- trunk/jsoncpp/test/test_array_02.expected | 4 +- trunk/jsoncpp/test/test_array_02.json | 2 +- trunk/jsoncpp/test/test_array_03.expected | 12 +- trunk/jsoncpp/test/test_array_03.json | 2 +- trunk/jsoncpp/test/test_array_04.expected | 10 +- trunk/jsoncpp/test/test_array_04.json | 2 +- trunk/jsoncpp/test/test_array_05.expected | 200 ++++----- trunk/jsoncpp/test/test_array_06.expected | 10 +- trunk/jsoncpp/test/test_array_06.json | 6 +- trunk/jsoncpp/test/test_basic_01.expected | 2 +- trunk/jsoncpp/test/test_basic_01.json | 2 +- trunk/jsoncpp/test/test_basic_02.expected | 2 +- trunk/jsoncpp/test/test_basic_02.json | 2 +- trunk/jsoncpp/test/test_basic_03.expected | 6 +- trunk/jsoncpp/test/test_basic_03.json | 6 +- trunk/jsoncpp/test/test_basic_04.expected | 4 +- trunk/jsoncpp/test/test_basic_04.json | 4 +- trunk/jsoncpp/test/test_basic_05.expected | 4 +- trunk/jsoncpp/test/test_basic_05.json | 4 +- trunk/jsoncpp/test/test_basic_06.expected | 4 +- trunk/jsoncpp/test/test_basic_06.json | 4 +- trunk/jsoncpp/test/test_basic_07.expected | 4 +- trunk/jsoncpp/test/test_basic_07.json | 4 +- trunk/jsoncpp/test/test_basic_08.expected | 4 +- trunk/jsoncpp/test/test_basic_08.json | 6 +- trunk/jsoncpp/test/test_basic_09.expected | 4 +- trunk/jsoncpp/test/test_basic_09.json | 8 +- trunk/jsoncpp/test/test_complex_01.expected | 40 +- trunk/jsoncpp/test/test_complex_01.json | 34 +- trunk/jsoncpp/test/test_integer_01.expected | 2 +- trunk/jsoncpp/test/test_integer_01.json | 4 +- trunk/jsoncpp/test/test_integer_02.expected | 2 +- trunk/jsoncpp/test/test_integer_02.json | 4 +- trunk/jsoncpp/test/test_integer_03.expected | 2 +- trunk/jsoncpp/test/test_integer_03.json | 4 +- trunk/jsoncpp/test/test_integer_04.expected | 4 +- trunk/jsoncpp/test/test_integer_04.json | 6 +- trunk/jsoncpp/test/test_integer_05.expected | 4 +- trunk/jsoncpp/test/test_integer_05.json | 4 +- trunk/jsoncpp/test/test_object_01.expected | 2 +- trunk/jsoncpp/test/test_object_01.json | 2 +- trunk/jsoncpp/test/test_object_02.expected | 4 +- trunk/jsoncpp/test/test_object_02.json | 2 +- trunk/jsoncpp/test/test_object_03.expected | 8 +- trunk/jsoncpp/test/test_object_03.json | 10 
+- trunk/jsoncpp/test/test_object_04.expected | 4 +- trunk/jsoncpp/test/test_object_04.json | 6 +- .../test/test_preserve_comment_01.expected | 6 +- .../test/test_preserve_comment_01.json | 28 +- trunk/jsoncpp/test/test_real_01.expected | 4 +- trunk/jsoncpp/test/test_real_01.json | 6 +- trunk/jsoncpp/test/test_real_02.expected | 4 +- trunk/jsoncpp/test/test_real_02.json | 6 +- trunk/jsoncpp/test/test_real_03.expected | 4 +- trunk/jsoncpp/test/test_real_03.json | 6 +- trunk/jsoncpp/test/test_real_04.expected | 4 +- trunk/jsoncpp/test/test_real_04.json | 6 +- trunk/jsoncpp/test/test_real_05.expected | 6 +- trunk/jsoncpp/test/test_real_05.json | 6 +- trunk/jsoncpp/test/test_real_06.expected | 6 +- trunk/jsoncpp/test/test_real_06.json | 6 +- trunk/jsoncpp/test/test_real_07.expected | 6 +- trunk/jsoncpp/test/test_real_07.json | 6 +- 77 files changed, 1203 insertions(+), 1203 deletions(-) diff --git a/trunk/jsoncpp/AUTHORS b/trunk/jsoncpp/AUTHORS index 333e120..c0fbbee 100644 --- a/trunk/jsoncpp/AUTHORS +++ b/trunk/jsoncpp/AUTHORS @@ -1 +1 @@ -Baptiste Lepilleur +Baptiste Lepilleur diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 65d3629..f3aaa8e 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -1,44 +1,44 @@ -* Introduction: - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, and handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - -* Building/Testing: - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - doc: build documentation - doc-dist: build documentation tarball - +* Introduction: + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, and handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + +* Building/Testing: + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). 
+ +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + doc: build documentation + doc-dist: build documentation tarball + diff --git a/trunk/jsoncpp/scons-tools/doxygen.py b/trunk/jsoncpp/scons-tools/doxygen.py index f85f4a3..7b8dee4 100644 --- a/trunk/jsoncpp/scons-tools/doxygen.py +++ b/trunk/jsoncpp/scons-tools/doxygen.py @@ -1,205 +1,205 @@ -# Big issue: -# emitter depends on doxyfile which is generated from doxyfile.in. -# build fails after cleaning and relaunching the build. - -import os -import os.path -import glob -from fnmatch import fnmatch - -def DoxyfileParse(file_contents): - """ - Parse a Doxygen source file and return a dictionary of all the values. - Values will be strings and lists of strings. - """ - data = {} - - import shlex - lex = shlex.shlex(instream = file_contents, posix = True) - lex.wordchars += "*+./-:" - lex.whitespace = lex.whitespace.replace("\n", "") - lex.escape = "" - - lineno = lex.lineno - last_backslash_lineno = lineno - token = lex.get_token() - key = token # the first token should be a key - last_token = "" - key_token = False - next_key = False - new_data = True - - def append_data(data, key, new_data, token): - if new_data or len(data[key]) == 0: - data[key].append(token) - else: - data[key][-1] += token - - while token: - if token in ['\n']: - if last_token not in ['\\']: - key_token = True - elif token in ['\\']: - pass - elif key_token: - key = token - key_token = False - else: - if token == "+=": - if not data.has_key(key): - data[key] = list() - elif token == "=": - data[key] = list() - else: - append_data( data, key, new_data, token ) - new_data = True - - last_token = token - token = lex.get_token() - - if last_token == '\\' and token != '\n': - new_data = False - append_data( data, key, new_data, '\\' ) - - # compress lists of len 1 into single strings - for (k, v) in data.items(): - if len(v) == 0: - data.pop(k) - - # items in the following list will be kept as lists and not converted to strings - if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: - continue - - if len(v) == 1: - data[k] = v[0] - - return data - -def DoxySourceScan(node, env, path): - """ - Doxygen Doxyfile source scanner. This should scan the Doxygen file and add - any files used to generate docs to the list of source files. 
- """ - default_file_patterns = [ - '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', - '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', - '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', - '*.py', - ] - - default_exclude_patterns = [ - '*~', - ] - - sources = [] - - data = DoxyfileParse(node.get_contents()) - - if data.get("RECURSIVE", "NO") == "YES": - recursive = True - else: - recursive = False - - file_patterns = data.get("FILE_PATTERNS", default_file_patterns) - exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) - - doxyfile_dir = str( node.dir ) - -## print 'running from', os.getcwd() - for node in data.get("INPUT", []): - node_real_path = os.path.normpath( os.path.join( doxyfile_dir, node ) ) - if os.path.isfile(node_real_path): -## print str(node), 'is a file' - sources.append(node) - elif os.path.isdir(node_real_path): -## print str(node), 'is a directory' - if recursive: - for root, dirs, files in os.walk(node): - for f in files: - filename = os.path.join(root, f) - - pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) - exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) - - if pattern_check and not exclude_check: - sources.append(filename) -## print ' adding source', os.path.abspath( filename ) - else: - for pattern in file_patterns: - sources.extend(glob.glob(os.path.join( node, pattern))) -## else: -## print str(node), 'is neither a file nor a directory' - sources = map( lambda path: env.File(path), sources ) - return sources - - -def DoxySourceScanCheck(node, env): - """Check if we should scan this file""" - return os.path.isfile(node.path) - -def DoxyEmitter(source, target, env): - """Doxygen Doxyfile emitter""" - # possible output formats and their default values and output locations - output_formats = { - "HTML": ("YES", "html"), - "LATEX": ("YES", "latex"), - "RTF": ("NO", "rtf"), - "MAN": ("YES", "man"), - "XML": ("NO", "xml"), - } - -## print '#### DoxyEmitter:', source[0].abspath, os.path.exists( source[0].abspath ) - data = DoxyfileParse(source[0].get_contents()) - - targets = [] - out_dir = data.get("OUTPUT_DIRECTORY", ".") - - # add our output locations - for (k, v) in output_formats.items(): - if data.get("GENERATE_" + k, v[0]) == "YES": - targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) - - # don't clobber targets - for node in targets: - env.Precious(node) - - # set up cleaning stuff - for node in targets: - clean_cmd = env.Clean(node, node) - env.Depends( clean_cmd, source ) - - return (targets, source) - -def generate(env): - """ - Add builders and construction variables for the - Doxygen tool. This is currently for Doxygen 1.4.6. - """ - doxyfile_scanner = env.Scanner( - DoxySourceScan, - "DoxySourceScan", - scan_check = DoxySourceScanCheck, - ) - - doxyfile_builder = env.Builder( - action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}", - varlist=['$SOURCES']), - emitter = DoxyEmitter, - target_factory = env.fs.Entry, - single_source = True, - source_scanner = doxyfile_scanner, - ) - - env.Append(BUILDERS = { - 'Doxygen': doxyfile_builder, - }) - - env.AppendUnique( - DOXYGEN = 'doxygen', - ) - -def exists(env): - """ - Make sure doxygen exists. - """ - return env.Detect("doxygen") +# Big issue: +# emitter depends on doxyfile which is generated from doxyfile.in. +# build fails after cleaning and relaunching the build. 
+ +import os +import os.path +import glob +from fnmatch import fnmatch + +def DoxyfileParse(file_contents): + """ + Parse a Doxygen source file and return a dictionary of all the values. + Values will be strings and lists of strings. + """ + data = {} + + import shlex + lex = shlex.shlex(instream = file_contents, posix = True) + lex.wordchars += "*+./-:" + lex.whitespace = lex.whitespace.replace("\n", "") + lex.escape = "" + + lineno = lex.lineno + last_backslash_lineno = lineno + token = lex.get_token() + key = token # the first token should be a key + last_token = "" + key_token = False + next_key = False + new_data = True + + def append_data(data, key, new_data, token): + if new_data or len(data[key]) == 0: + data[key].append(token) + else: + data[key][-1] += token + + while token: + if token in ['\n']: + if last_token not in ['\\']: + key_token = True + elif token in ['\\']: + pass + elif key_token: + key = token + key_token = False + else: + if token == "+=": + if not data.has_key(key): + data[key] = list() + elif token == "=": + data[key] = list() + else: + append_data( data, key, new_data, token ) + new_data = True + + last_token = token + token = lex.get_token() + + if last_token == '\\' and token != '\n': + new_data = False + append_data( data, key, new_data, '\\' ) + + # compress lists of len 1 into single strings + for (k, v) in data.items(): + if len(v) == 0: + data.pop(k) + + # items in the following list will be kept as lists and not converted to strings + if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: + continue + + if len(v) == 1: + data[k] = v[0] + + return data + +def DoxySourceScan(node, env, path): + """ + Doxygen Doxyfile source scanner. This should scan the Doxygen file and add + any files used to generate docs to the list of source files. 
+ """ + default_file_patterns = [ + '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', + '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', + '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', + '*.py', + ] + + default_exclude_patterns = [ + '*~', + ] + + sources = [] + + data = DoxyfileParse(node.get_contents()) + + if data.get("RECURSIVE", "NO") == "YES": + recursive = True + else: + recursive = False + + file_patterns = data.get("FILE_PATTERNS", default_file_patterns) + exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) + + doxyfile_dir = str( node.dir ) + +## print 'running from', os.getcwd() + for node in data.get("INPUT", []): + node_real_path = os.path.normpath( os.path.join( doxyfile_dir, node ) ) + if os.path.isfile(node_real_path): +## print str(node), 'is a file' + sources.append(node) + elif os.path.isdir(node_real_path): +## print str(node), 'is a directory' + if recursive: + for root, dirs, files in os.walk(node): + for f in files: + filename = os.path.join(root, f) + + pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) + exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) + + if pattern_check and not exclude_check: + sources.append(filename) +## print ' adding source', os.path.abspath( filename ) + else: + for pattern in file_patterns: + sources.extend(glob.glob(os.path.join( node, pattern))) +## else: +## print str(node), 'is neither a file nor a directory' + sources = map( lambda path: env.File(path), sources ) + return sources + + +def DoxySourceScanCheck(node, env): + """Check if we should scan this file""" + return os.path.isfile(node.path) + +def DoxyEmitter(source, target, env): + """Doxygen Doxyfile emitter""" + # possible output formats and their default values and output locations + output_formats = { + "HTML": ("YES", "html"), + "LATEX": ("YES", "latex"), + "RTF": ("NO", "rtf"), + "MAN": ("YES", "man"), + "XML": ("NO", "xml"), + } + +## print '#### DoxyEmitter:', source[0].abspath, os.path.exists( source[0].abspath ) + data = DoxyfileParse(source[0].get_contents()) + + targets = [] + out_dir = data.get("OUTPUT_DIRECTORY", ".") + + # add our output locations + for (k, v) in output_formats.items(): + if data.get("GENERATE_" + k, v[0]) == "YES": + targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) + + # don't clobber targets + for node in targets: + env.Precious(node) + + # set up cleaning stuff + for node in targets: + clean_cmd = env.Clean(node, node) + env.Depends( clean_cmd, source ) + + return (targets, source) + +def generate(env): + """ + Add builders and construction variables for the + Doxygen tool. This is currently for Doxygen 1.4.6. + """ + doxyfile_scanner = env.Scanner( + DoxySourceScan, + "DoxySourceScan", + scan_check = DoxySourceScanCheck, + ) + + doxyfile_builder = env.Builder( + action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}", + varlist=['$SOURCES']), + emitter = DoxyEmitter, + target_factory = env.fs.Entry, + single_source = True, + source_scanner = doxyfile_scanner, + ) + + env.Append(BUILDERS = { + 'Doxygen': doxyfile_builder, + }) + + env.AppendUnique( + DOXYGEN = 'doxygen', + ) + +def exists(env): + """ + Make sure doxygen exists. 
+ """ + return env.Detect("doxygen") diff --git a/trunk/jsoncpp/scons-tools/srcdist.py b/trunk/jsoncpp/scons-tools/srcdist.py index cfc5407..17f029f 100644 --- a/trunk/jsoncpp/scons-tools/srcdist.py +++ b/trunk/jsoncpp/scons-tools/srcdist.py @@ -1,179 +1,179 @@ -import os -import os.path -import glob -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return True +import os +import os.path +import glob +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return True diff --git a/trunk/jsoncpp/scons-tools/substinfile.py b/trunk/jsoncpp/scons-tools/substinfile.py index 2502262..4d30585 100644 --- a/trunk/jsoncpp/scons-tools/substinfile.py +++ b/trunk/jsoncpp/scons-tools/substinfile.py @@ -1,79 +1,79 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. 
+ """ + return True diff --git a/trunk/jsoncpp/scons-tools/targz.py b/trunk/jsoncpp/scons-tools/targz.py index 2f21204..a655b11 100644 --- a/trunk/jsoncpp/scons-tools/targz.py +++ b/trunk/jsoncpp/scons-tools/targz.py @@ -3,75 +3,75 @@ Tool-specific initialization for tarball. """ - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz import os.path import SCons.Builder import SCons.Node.FS import SCons.Util - + try: - import gzip + import gzip import tarfile internal_targz = 1 except ImportError: internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name def visit(tar, dirname, names): for name in names: path = os.path.join(dirname, name) if os.path.isfile(path): tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) fileobj = gzip.GzipFile( target_path, 'wb', compression ) tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: + for source in source: source_path = str(source) if source.isdir(): os.path.walk(source_path, visit, tar) - else: + else: tar.add(source_path, archive_name(source_path) ) # filename, arcname tar.close() targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - + def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) TarGzBuilder = makeBuilder() def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). 
+ TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_SUFFIX'] = '.tar.gz' env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. def exists(env): diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 1d8b303..88ed2f1 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -1,187 +1,187 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root ) -{ - Json::Reader reader; - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -int main( int argc, const char *argv[] ) -{ - if ( argc != 2 ) - { - printf( "Usage: %s input-json-file", argv[0] ); - return 3; - } - - std::string input = readInputTestFile( argv[1] ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", argv[1] ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", argv[1] ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - int exitCode = parseAndSaveValueTree( input, actualPath, "input", root ); - if ( exitCode == 0 ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, "rewrite", rewriteRoot ); - } - } - - return exitCode; -} - +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root ) +{ + Json::Reader reader; + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +int main( int argc, const char *argv[] ) +{ + if ( argc != 2 ) + { + printf( "Usage: %s input-json-file", argv[0] ); + return 3; + } + + std::string input = readInputTestFile( argv[1] ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", argv[1] ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", argv[1] ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + int exitCode = parseAndSaveValueTree( input, actualPath, "input", root ); + if ( exitCode == 0 ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, "rewrite", rewriteRoot ); + } + } + + return exitCode; +} + diff --git a/trunk/jsoncpp/src/jsontestrunner/sconscript b/trunk/jsoncpp/src/jsontestrunner/sconscript index f81a2dc..e9fe37f 100644 --- a/trunk/jsoncpp/src/jsontestrunner/sconscript +++ b/trunk/jsoncpp/src/jsontestrunner/sconscript @@ -1,6 +1,6 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) diff --git a/trunk/jsoncpp/test/cleantests.py b/trunk/jsoncpp/test/cleantests.py index 5872a87..e5f99e6 100644 --- a/trunk/jsoncpp/test/cleantests.py +++ b/trunk/jsoncpp/test/cleantests.py @@ -1,10 +1,10 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( pattern ) - -for path in paths: - os.unlink( path ) +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( pattern ) + +for path in paths: + os.unlink( path ) diff --git a/trunk/jsoncpp/test/generate_expected.py b/trunk/jsoncpp/test/generate_expected.py index a46e889..5b215c4 100644 --- a/trunk/jsoncpp/test/generate_expected.py +++ b/trunk/jsoncpp/test/generate_expected.py @@ -1,11 +1,11 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/trunk/jsoncpp/test/jsontestrunner.py b/trunk/jsoncpp/test/jsontestrunner.py index ec05a91..a076d0c 100644 --- a/trunk/jsoncpp/test/jsontestrunner.py +++ b/trunk/jsoncpp/test/jsontestrunner.py @@ -1,64 +1,64 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' 
or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.read( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.write( value ) - rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? - file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.read( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.write( value ) + rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index de7bd9d..38bfd6e 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -1,91 +1,91 @@ -import sys -import os -import os.path -import glob - - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None ): - if not input_dir: - input_dir = os.getcwd() - tests = glob.glob( os.path.join( input_dir, '*.json' ) ) - failed_tests = [] - for input_path in tests: - print 'TESTING:', input_path, - pipe = os.popen( "%s %s" % (jsontest_executable_path, input_path) ) - process_output = pipe.read() - status = pipe.close() - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' 
% len(tests) - return 0 - -if __name__ == '__main__': - if len(sys.argv) < 1 or len(sys.argv) > 2: - print "Usage: %s jsontest-executable-path [input-testcase-directory]" % sys.argv[0] - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( sys.argv[1] ) ) - if len(sys.argv) > 2: - input_path = os.path.normpath( os.path.abspath( sys.argv[2] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path ) +import sys +import os +import os.path +import glob + + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None ): + if not input_dir: + input_dir = os.getcwd() + tests = glob.glob( os.path.join( input_dir, '*.json' ) ) + failed_tests = [] + for input_path in tests: + print 'TESTING:', input_path, + pipe = os.popen( "%s %s" % (jsontest_executable_path, input_path) ) + process_output = pipe.read() + status = pipe.close() + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' 
% len(tests) + return 0 + +if __name__ == '__main__': + if len(sys.argv) < 1 or len(sys.argv) > 2: + print "Usage: %s jsontest-executable-path [input-testcase-directory]" % sys.argv[0] + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( sys.argv[1] ) ) + if len(sys.argv) > 2: + input_path = os.path.normpath( os.path.abspath( sys.argv[2] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path ) sys.exit( status ) \ No newline at end of file diff --git a/trunk/jsoncpp/test/test_array_01.expected b/trunk/jsoncpp/test/test_array_01.expected index 4aa8fb3..a341ff7 100644 --- a/trunk/jsoncpp/test/test_array_01.expected +++ b/trunk/jsoncpp/test/test_array_01.expected @@ -1 +1 @@ -.=[] +.=[] diff --git a/trunk/jsoncpp/test/test_array_01.json b/trunk/jsoncpp/test/test_array_01.json index 60b0742..fe51488 100644 --- a/trunk/jsoncpp/test/test_array_01.json +++ b/trunk/jsoncpp/test/test_array_01.json @@ -1 +1 @@ -[] +[] diff --git a/trunk/jsoncpp/test/test_array_02.expected b/trunk/jsoncpp/test/test_array_02.expected index 5b7c72a..ef1f262 100644 --- a/trunk/jsoncpp/test/test_array_02.expected +++ b/trunk/jsoncpp/test/test_array_02.expected @@ -1,2 +1,2 @@ -.=[] -.[0]=1 +.=[] +.[0]=1 diff --git a/trunk/jsoncpp/test/test_array_02.json b/trunk/jsoncpp/test/test_array_02.json index c02be12..7660873 100644 --- a/trunk/jsoncpp/test/test_array_02.json +++ b/trunk/jsoncpp/test/test_array_02.json @@ -1 +1 @@ -[1] +[1] diff --git a/trunk/jsoncpp/test/test_array_03.expected b/trunk/jsoncpp/test/test_array_03.expected index 0ba568e..3d8dc18 100644 --- a/trunk/jsoncpp/test/test_array_03.expected +++ b/trunk/jsoncpp/test/test_array_03.expected @@ -1,6 +1,6 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/trunk/jsoncpp/test/test_array_03.json b/trunk/jsoncpp/test/test_array_03.json index ac8f422..9b3f924 100644 --- a/trunk/jsoncpp/test/test_array_03.json +++ b/trunk/jsoncpp/test/test_array_03.json @@ -1 +1 @@ -[ 1, 2 , 3,4,5] +[ 1, 2 , 3,4,5] diff --git a/trunk/jsoncpp/test/test_array_04.expected b/trunk/jsoncpp/test/test_array_04.expected index db58c30..ad4add9 100644 --- a/trunk/jsoncpp/test/test_array_04.expected +++ b/trunk/jsoncpp/test/test_array_04.expected @@ -1,5 +1,5 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/trunk/jsoncpp/test/test_array_04.json b/trunk/jsoncpp/test/test_array_04.json index 0755478..ecca546 100644 --- a/trunk/jsoncpp/test/test_array_04.json +++ b/trunk/jsoncpp/test/test_array_04.json @@ -1 +1 @@ -[1, "abc" , 12.3, -4] +[1, "abc" , 12.3, -4] diff --git a/trunk/jsoncpp/test/test_array_05.expected b/trunk/jsoncpp/test/test_array_05.expected index 82ad484..76cff87 100644 --- a/trunk/jsoncpp/test/test_array_05.expected +++ b/trunk/jsoncpp/test/test_array_05.expected @@ -1,100 +1,100 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 
-.[64]=65 -.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/trunk/jsoncpp/test/test_array_06.expected b/trunk/jsoncpp/test/test_array_06.expected index e087b63..5c9f48e 100644 --- a/trunk/jsoncpp/test/test_array_06.expected +++ b/trunk/jsoncpp/test/test_array_06.expected @@ -1,5 +1,5 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/trunk/jsoncpp/test/test_array_06.json b/trunk/jsoncpp/test/test_array_06.json index 9777a64..7f6c516 100644 --- a/trunk/jsoncpp/test/test_array_06.json +++ b/trunk/jsoncpp/test/test_array_06.json @@ -1,4 +1,4 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/trunk/jsoncpp/test/test_basic_01.expected b/trunk/jsoncpp/test/test_basic_01.expected index 0527387..d761fce 100644 --- a/trunk/jsoncpp/test/test_basic_01.expected +++ b/trunk/jsoncpp/test/test_basic_01.expected @@ -1 +1 @@ -.=123456789 +.=123456789 diff --git a/trunk/jsoncpp/test/test_basic_01.json b/trunk/jsoncpp/test/test_basic_01.json index 57cf9b9..11f11f9 100644 --- a/trunk/jsoncpp/test/test_basic_01.json +++ b/trunk/jsoncpp/test/test_basic_01.json @@ -1 +1 @@ -0123456789 +0123456789 diff --git a/trunk/jsoncpp/test/test_basic_02.expected b/trunk/jsoncpp/test/test_basic_02.expected index 9040e84..650e37c 100644 --- a/trunk/jsoncpp/test/test_basic_02.expected +++ b/trunk/jsoncpp/test/test_basic_02.expected @@ -1 +1 @@ -.=-123456789 +.=-123456789 diff --git a/trunk/jsoncpp/test/test_basic_02.json b/trunk/jsoncpp/test/test_basic_02.json index fe84da4..bf11bce 100644 --- a/trunk/jsoncpp/test/test_basic_02.json +++ b/trunk/jsoncpp/test/test_basic_02.json @@ -1 +1 @@ --0123456789 +-0123456789 diff --git 
a/trunk/jsoncpp/test/test_basic_03.expected b/trunk/jsoncpp/test/test_basic_03.expected index 494278d..1da2d39 100644 --- a/trunk/jsoncpp/test/test_basic_03.expected +++ b/trunk/jsoncpp/test/test_basic_03.expected @@ -1,3 +1,3 @@ -.=1.2345678 - - +.=1.2345678 + + diff --git a/trunk/jsoncpp/test/test_basic_03.json b/trunk/jsoncpp/test/test_basic_03.json index feac150..a92b6bd 100644 --- a/trunk/jsoncpp/test/test_basic_03.json +++ b/trunk/jsoncpp/test/test_basic_03.json @@ -1,3 +1,3 @@ -1.2345678 - - +1.2345678 + + diff --git a/trunk/jsoncpp/test/test_basic_04.expected b/trunk/jsoncpp/test/test_basic_04.expected index 659f744..013f424 100644 --- a/trunk/jsoncpp/test/test_basic_04.expected +++ b/trunk/jsoncpp/test/test_basic_04.expected @@ -1,2 +1,2 @@ -.="abcdef" - +.="abcdef" + diff --git a/trunk/jsoncpp/test/test_basic_04.json b/trunk/jsoncpp/test/test_basic_04.json index 01374bd..17eeb99 100644 --- a/trunk/jsoncpp/test/test_basic_04.json +++ b/trunk/jsoncpp/test/test_basic_04.json @@ -1,2 +1,2 @@ -"abcdef" - +"abcdef" + diff --git a/trunk/jsoncpp/test/test_basic_05.expected b/trunk/jsoncpp/test/test_basic_05.expected index cb1cdad..c8db822 100644 --- a/trunk/jsoncpp/test/test_basic_05.expected +++ b/trunk/jsoncpp/test/test_basic_05.expected @@ -1,2 +1,2 @@ -.=null - +.=null + diff --git a/trunk/jsoncpp/test/test_basic_05.json b/trunk/jsoncpp/test/test_basic_05.json index a6d4f5a..d0aaea2 100644 --- a/trunk/jsoncpp/test/test_basic_05.json +++ b/trunk/jsoncpp/test/test_basic_05.json @@ -1,2 +1,2 @@ -null - +null + diff --git a/trunk/jsoncpp/test/test_basic_06.expected b/trunk/jsoncpp/test/test_basic_06.expected index 8b22731..49be55a 100644 --- a/trunk/jsoncpp/test/test_basic_06.expected +++ b/trunk/jsoncpp/test/test_basic_06.expected @@ -1,2 +1,2 @@ -.=true - +.=true + diff --git a/trunk/jsoncpp/test/test_basic_06.json b/trunk/jsoncpp/test/test_basic_06.json index 5d967af..7eead1e 100644 --- a/trunk/jsoncpp/test/test_basic_06.json +++ b/trunk/jsoncpp/test/test_basic_06.json @@ -1,2 +1,2 @@ -true - +true + diff --git a/trunk/jsoncpp/test/test_basic_07.expected b/trunk/jsoncpp/test/test_basic_07.expected index 4979ed5..fe55a6a 100644 --- a/trunk/jsoncpp/test/test_basic_07.expected +++ b/trunk/jsoncpp/test/test_basic_07.expected @@ -1,2 +1,2 @@ -.=false - +.=false + diff --git a/trunk/jsoncpp/test/test_basic_07.json b/trunk/jsoncpp/test/test_basic_07.json index b7ee6c5..a864bc4 100644 --- a/trunk/jsoncpp/test/test_basic_07.json +++ b/trunk/jsoncpp/test/test_basic_07.json @@ -1,2 +1,2 @@ -false - +false + diff --git a/trunk/jsoncpp/test/test_basic_08.expected b/trunk/jsoncpp/test/test_basic_08.expected index cb1cdad..c8db822 100644 --- a/trunk/jsoncpp/test/test_basic_08.expected +++ b/trunk/jsoncpp/test/test_basic_08.expected @@ -1,2 +1,2 @@ -.=null - +.=null + diff --git a/trunk/jsoncpp/test/test_basic_08.json b/trunk/jsoncpp/test/test_basic_08.json index fe107f4..fd78837 100644 --- a/trunk/jsoncpp/test/test_basic_08.json +++ b/trunk/jsoncpp/test/test_basic_08.json @@ -1,3 +1,3 @@ -// C++ style comment -null - +// C++ style comment +null + diff --git a/trunk/jsoncpp/test/test_basic_09.expected b/trunk/jsoncpp/test/test_basic_09.expected index cb1cdad..c8db822 100644 --- a/trunk/jsoncpp/test/test_basic_09.expected +++ b/trunk/jsoncpp/test/test_basic_09.expected @@ -1,2 +1,2 @@ -.=null - +.=null + diff --git a/trunk/jsoncpp/test/test_basic_09.json b/trunk/jsoncpp/test/test_basic_09.json index e0cb089..fc95f0f 100644 --- a/trunk/jsoncpp/test/test_basic_09.json +++ 
b/trunk/jsoncpp/test/test_basic_09.json @@ -1,4 +1,4 @@ -/* C style comment - */ -null - +/* C style comment + */ +null + diff --git a/trunk/jsoncpp/test/test_complex_01.expected b/trunk/jsoncpp/test/test_complex_01.expected index 44e753b..7573c88 100644 --- a/trunk/jsoncpp/test/test_complex_01.expected +++ b/trunk/jsoncpp/test/test_complex_01.expected @@ -1,20 +1,20 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/trunk/jsoncpp/test/test_complex_01.json b/trunk/jsoncpp/test/test_complex_01.json index fb2f86c..cc0f30f 100644 --- a/trunk/jsoncpp/test/test_complex_01.json +++ b/trunk/jsoncpp/test/test_complex_01.json @@ -1,17 +1,17 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/trunk/jsoncpp/test/test_integer_01.expected b/trunk/jsoncpp/test/test_integer_01.expected index 24aa29e..593f1db 100644 --- a/trunk/jsoncpp/test/test_integer_01.expected +++ b/trunk/jsoncpp/test/test_integer_01.expected @@ -1 +1 @@ -.=2147483647 +.=2147483647 diff --git a/trunk/jsoncpp/test/test_integer_01.json b/trunk/jsoncpp/test/test_integer_01.json index e82c7ad..5ab12ff 100644 --- a/trunk/jsoncpp/test/test_integer_01.json +++ b/trunk/jsoncpp/test/test_integer_01.json @@ -1,2 +1,2 @@ -// Max signed integer -2147483647 +// Max signed integer +2147483647 diff --git a/trunk/jsoncpp/test/test_integer_02.expected b/trunk/jsoncpp/test/test_integer_02.expected index dab99eb..4b83bd7 100644 --- a/trunk/jsoncpp/test/test_integer_02.expected +++ b/trunk/jsoncpp/test/test_integer_02.expected @@ -1 +1 @@ -.=-2147483648 +.=-2147483648 diff --git a/trunk/jsoncpp/test/test_integer_02.json b/trunk/jsoncpp/test/test_integer_02.json index 548764e..056c850 100644 --- a/trunk/jsoncpp/test/test_integer_02.json +++ b/trunk/jsoncpp/test/test_integer_02.json @@ -1,2 +1,2 @@ -// Min signed integer --2147483648 +// Min signed integer +-2147483648 diff --git a/trunk/jsoncpp/test/test_integer_03.expected b/trunk/jsoncpp/test/test_integer_03.expected index dde3260..37c1cb1 100644 --- a/trunk/jsoncpp/test/test_integer_03.expected +++ b/trunk/jsoncpp/test/test_integer_03.expected @@ -1 +1 @@ -.=4294967295 +.=4294967295 diff --git a/trunk/jsoncpp/test/test_integer_03.json b/trunk/jsoncpp/test/test_integer_03.json index 18aeaf6..12ef3fb 100644 --- a/trunk/jsoncpp/test/test_integer_03.json +++ b/trunk/jsoncpp/test/test_integer_03.json @@ -1,2 +1,2 @@ -// Max unsigned integer -4294967295 +// Max unsigned integer +4294967295 diff --git 
a/trunk/jsoncpp/test/test_integer_04.expected b/trunk/jsoncpp/test/test_integer_04.expected index 8da9013..b7b548e 100644 --- a/trunk/jsoncpp/test/test_integer_04.expected +++ b/trunk/jsoncpp/test/test_integer_04.expected @@ -1,2 +1,2 @@ -.=0 - +.=0 + diff --git a/trunk/jsoncpp/test/test_integer_04.json b/trunk/jsoncpp/test/test_integer_04.json index 8202483..bf81499 100644 --- a/trunk/jsoncpp/test/test_integer_04.json +++ b/trunk/jsoncpp/test/test_integer_04.json @@ -1,3 +1,3 @@ -// Min unsigned integer -0 - +// Min unsigned integer +0 + diff --git a/trunk/jsoncpp/test/test_integer_05.expected b/trunk/jsoncpp/test/test_integer_05.expected index 238d1d6..0caea9d 100644 --- a/trunk/jsoncpp/test/test_integer_05.expected +++ b/trunk/jsoncpp/test/test_integer_05.expected @@ -1,2 +1,2 @@ -.=1 - +.=1 + diff --git a/trunk/jsoncpp/test/test_integer_05.json b/trunk/jsoncpp/test/test_integer_05.json index 4797790..d474e1b 100644 --- a/trunk/jsoncpp/test/test_integer_05.json +++ b/trunk/jsoncpp/test/test_integer_05.json @@ -1,2 +1,2 @@ -1 - +1 + diff --git a/trunk/jsoncpp/test/test_object_01.expected b/trunk/jsoncpp/test/test_object_01.expected index 8e0634e..67444e5 100644 --- a/trunk/jsoncpp/test/test_object_01.expected +++ b/trunk/jsoncpp/test/test_object_01.expected @@ -1 +1 @@ -.={} +.={} diff --git a/trunk/jsoncpp/test/test_object_01.json b/trunk/jsoncpp/test/test_object_01.json index 69a88e3..0967ef4 100644 --- a/trunk/jsoncpp/test/test_object_01.json +++ b/trunk/jsoncpp/test/test_object_01.json @@ -1 +1 @@ -{} +{} diff --git a/trunk/jsoncpp/test/test_object_02.expected b/trunk/jsoncpp/test/test_object_02.expected index 2c9de06..79391c2 100644 --- a/trunk/jsoncpp/test/test_object_02.expected +++ b/trunk/jsoncpp/test/test_object_02.expected @@ -1,2 +1,2 @@ -.={} -.count=1234 +.={} +.count=1234 diff --git a/trunk/jsoncpp/test/test_object_02.json b/trunk/jsoncpp/test/test_object_02.json index bd157ec..d0f2fac 100644 --- a/trunk/jsoncpp/test/test_object_02.json +++ b/trunk/jsoncpp/test/test_object_02.json @@ -1 +1 @@ -{ "count" : 1234 } +{ "count" : 1234 } diff --git a/trunk/jsoncpp/test/test_object_03.expected b/trunk/jsoncpp/test/test_object_03.expected index 235a28e..5e96113 100644 --- a/trunk/jsoncpp/test/test_object_03.expected +++ b/trunk/jsoncpp/test/test_object_03.expected @@ -1,4 +1,4 @@ -.={} -.attribute="random" -.count=1234 -.name="test" +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/trunk/jsoncpp/test/test_object_03.json b/trunk/jsoncpp/test/test_object_03.json index 0947a44..4fcd4d8 100644 --- a/trunk/jsoncpp/test/test_object_03.json +++ b/trunk/jsoncpp/test/test_object_03.json @@ -1,5 +1,5 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/trunk/jsoncpp/test/test_object_04.expected b/trunk/jsoncpp/test/test_object_04.expected index cf4d7c3..812965b 100644 --- a/trunk/jsoncpp/test/test_object_04.expected +++ b/trunk/jsoncpp/test/test_object_04.expected @@ -1,2 +1,2 @@ -.={} -.=1234 +.={} +.=1234 diff --git a/trunk/jsoncpp/test/test_object_04.json b/trunk/jsoncpp/test/test_object_04.json index f1e364a..450762d 100644 --- a/trunk/jsoncpp/test/test_object_04.json +++ b/trunk/jsoncpp/test/test_object_04.json @@ -1,3 +1,3 @@ -{ - "" : 1234 -} +{ + "" : 1234 +} diff --git a/trunk/jsoncpp/test/test_preserve_comment_01.expected b/trunk/jsoncpp/test/test_preserve_comment_01.expected index b5616a9..8d88041 100644 --- a/trunk/jsoncpp/test/test_preserve_comment_01.expected +++ 
b/trunk/jsoncpp/test/test_preserve_comment_01.expected @@ -1,3 +1,3 @@ -.={} -.first=1 -.second=2 +.={} +.first=1 +.second=2 diff --git a/trunk/jsoncpp/test/test_preserve_comment_01.json b/trunk/jsoncpp/test/test_preserve_comment_01.json index 0291fff..fabd55d 100644 --- a/trunk/jsoncpp/test/test_preserve_comment_01.json +++ b/trunk/jsoncpp/test/test_preserve_comment_01.json @@ -1,14 +1,14 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. - */ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. + */ diff --git a/trunk/jsoncpp/test/test_real_01.expected b/trunk/jsoncpp/test/test_real_01.expected index 57dee39..ae23572 100644 --- a/trunk/jsoncpp/test/test_real_01.expected +++ b/trunk/jsoncpp/test/test_real_01.expected @@ -1,2 +1,2 @@ -.=8589934592 - +.=8589934592 + diff --git a/trunk/jsoncpp/test/test_real_01.json b/trunk/jsoncpp/test/test_real_01.json index 5cb1bbf..358452d 100644 --- a/trunk/jsoncpp/test/test_real_01.json +++ b/trunk/jsoncpp/test/test_real_01.json @@ -1,3 +1,3 @@ -// 2^33 => out of integer range, switch to double -8589934592 - +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/trunk/jsoncpp/test/test_real_02.expected b/trunk/jsoncpp/test/test_real_02.expected index 181592a..df8de42 100644 --- a/trunk/jsoncpp/test/test_real_02.expected +++ b/trunk/jsoncpp/test/test_real_02.expected @@ -1,2 +1,2 @@ -.=-4294967295 - +.=-4294967295 + diff --git a/trunk/jsoncpp/test/test_real_02.json b/trunk/jsoncpp/test/test_real_02.json index 45092ef..936c706 100644 --- a/trunk/jsoncpp/test/test_real_02.json +++ b/trunk/jsoncpp/test/test_real_02.json @@ -1,3 +1,3 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/trunk/jsoncpp/test/test_real_03.expected b/trunk/jsoncpp/test/test_real_03.expected index 181592a..df8de42 100644 --- a/trunk/jsoncpp/test/test_real_03.expected +++ b/trunk/jsoncpp/test/test_real_03.expected @@ -1,2 +1,2 @@ -.=-4294967295 - +.=-4294967295 + diff --git a/trunk/jsoncpp/test/test_real_03.json b/trunk/jsoncpp/test/test_real_03.json index 45092ef..936c706 100644 --- a/trunk/jsoncpp/test/test_real_03.json +++ b/trunk/jsoncpp/test/test_real_03.json @@ -1,3 +1,3 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/trunk/jsoncpp/test/test_real_04.expected b/trunk/jsoncpp/test/test_real_04.expected index 2f84bf1..d726abe 100644 --- a/trunk/jsoncpp/test/test_real_04.expected +++ b/trunk/jsoncpp/test/test_real_04.expected @@ -1,2 +1,2 @@ -.=1.2345678 - +.=1.2345678 + diff --git a/trunk/jsoncpp/test/test_real_04.json b/trunk/jsoncpp/test/test_real_04.json index 7e71794..a8eb6d0 100644 --- a/trunk/jsoncpp/test/test_real_04.json +++ b/trunk/jsoncpp/test/test_real_04.json @@ -1,3 +1,3 @@ -// 1.2345678 -12345678e-7 - +// 1.2345678 +12345678e-7 + diff --git a/trunk/jsoncpp/test/test_real_05.expected b/trunk/jsoncpp/test/test_real_05.expected index 168f6e8..949fd8f 100644 --- a/trunk/jsoncpp/test/test_real_05.expected +++ b/trunk/jsoncpp/test/test_real_05.expected @@ -1,3 +1,3 @@ -.=1234567.8 - - +.=1234567.8 + + diff --git 
a/trunk/jsoncpp/test/test_real_05.json b/trunk/jsoncpp/test/test_real_05.json index 950f6a7..f7923ba 100644 --- a/trunk/jsoncpp/test/test_real_05.json +++ b/trunk/jsoncpp/test/test_real_05.json @@ -1,3 +1,3 @@ -// 1234567.8 -0.12345678e7 - +// 1234567.8 +0.12345678e7 + diff --git a/trunk/jsoncpp/test/test_real_06.expected b/trunk/jsoncpp/test/test_real_06.expected index 45906e3..03b7d7f 100644 --- a/trunk/jsoncpp/test/test_real_06.expected +++ b/trunk/jsoncpp/test/test_real_06.expected @@ -1,3 +1,3 @@ -.=-1.2345678 - - +.=-1.2345678 + + diff --git a/trunk/jsoncpp/test/test_real_06.json b/trunk/jsoncpp/test/test_real_06.json index dde1916..485419a 100644 --- a/trunk/jsoncpp/test/test_real_06.json +++ b/trunk/jsoncpp/test/test_real_06.json @@ -1,3 +1,3 @@ -// -1.2345678 --12345678e-7 - +// -1.2345678 +-12345678e-7 + diff --git a/trunk/jsoncpp/test/test_real_07.expected b/trunk/jsoncpp/test/test_real_07.expected index f2922f9..12025a4 100644 --- a/trunk/jsoncpp/test/test_real_07.expected +++ b/trunk/jsoncpp/test/test_real_07.expected @@ -1,3 +1,3 @@ -.=-1234567.8 - - +.=-1234567.8 + + diff --git a/trunk/jsoncpp/test/test_real_07.json b/trunk/jsoncpp/test/test_real_07.json index dd43ba7..8013eb5 100644 --- a/trunk/jsoncpp/test/test_real_07.json +++ b/trunk/jsoncpp/test/test_real_07.json @@ -1,3 +1,3 @@ -// -1234567.8 --0.12345678e7 - +// -1234567.8 +-0.12345678e7 + From 2a25fa510bde6484e2b0ac87a820e04289eeeb77 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 22:01:40 +0000 Subject: [PATCH 057/268] added dependency git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@57 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/jsontestrunner/sconscript | 3 +++ 1 file changed, 3 insertions(+) diff --git a/trunk/jsoncpp/src/jsontestrunner/sconscript b/trunk/jsoncpp/src/jsontestrunner/sconscript index e9fe37f..6e68e31 100644 --- a/trunk/jsoncpp/src/jsontestrunner/sconscript +++ b/trunk/jsoncpp/src/jsontestrunner/sconscript @@ -4,3 +4,6 @@ buildJSONTests( env_testing, Split( """ main.cpp """ ), 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') From 4ac04148f3565ae177af837ae3f8fdc584e5cfd3 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 22:03:11 +0000 Subject: [PATCH 058/268] added comment -- I always have to run twice to build docs because the doxyfile gets updated and causes scons to get confused. I do not know how to fix this, but I simply run 'scons doc/doxyfile' first. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@58 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/sconscript | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/trunk/jsoncpp/doc/sconscript b/trunk/jsoncpp/doc/sconscript index dc29320..3e4205c 100644 --- a/trunk/jsoncpp/doc/sconscript +++ b/trunk/jsoncpp/doc/sconscript @@ -20,3 +20,7 @@ if 'doxygen' in env['TOOLS']: TARGZ_BASEDIR = doc_topdir ) env.Depends( zip_doc_cmd, alias_doc_cmd ) env.Alias( 'doc-dist', zip_doc_cmd ) + + # When doxyfile gets updated, I get errors on the first pass. + # I have to run scons twice. Something is wrong with the dependencies + # here, but I avoid it by running "scons doc/doxyfile" first. From e387c4ac8a5ec1c0036cd59500bb06e4ffd56596 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 22:04:54 +0000 Subject: [PATCH 059/268] This file is auto-generated, even if it already exists. That is causing scons dependency problems, but the right answer is not to add the file, apparently. 
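
A minimal sketch of the alternative hinted at here (illustrative paths and rule names only, not the project's actual build code; patch 62 below implements a fuller version of the same idea): make the doxyfile a generated build target derived from the template, so SCons owns its timestamp instead of fighting over a checked-in copy.

from SCons.Script import Environment
import glob

def build_doxyfile(target, source, env):
    # A real rule would substitute version numbers and paths into the template;
    # here the template is simply copied through.
    text = open(str(source[0]), 'rt').read()
    open(str(target[0]), 'wt').write(text)

env = Environment()
# The doxyfile is derived from the template, never checked in.
doxyfile = env.Command('buildscons/doxyfile', 'doc/doxyfile.in', build_doxyfile)
# Running doxygen consumes the generated doxyfile...
html_doc = env.Command('buildscons/html/index.html', doxyfile, 'doxygen $SOURCE')
# ...and the HTML output also depends on the headers doxygen reads.
env.Depends(html_doc, glob.glob('include/json/*.h'))
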
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@59 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/doxyfile | 232 ------------------------------------- 1 file changed, 232 deletions(-) delete mode 100644 trunk/jsoncpp/doc/doxyfile diff --git a/trunk/jsoncpp/doc/doxyfile b/trunk/jsoncpp/doc/doxyfile deleted file mode 100644 index 982347c..0000000 --- a/trunk/jsoncpp/doc/doxyfile +++ /dev/null @@ -1,232 +0,0 @@ -# Doxyfile 1.4.3 - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- -PROJECT_NAME = "JsonCpp" -PROJECT_NUMBER = 0.1 -OUTPUT_DIRECTORY = /tmp/cdunn/www/jsoncpp/trunk/jsoncpp/buildscons -CREATE_SUBDIRS = NO -OUTPUT_LANGUAGE = English -USE_WINDOWS_ENCODING = NO -BRIEF_MEMBER_DESC = YES -REPEAT_BRIEF = YES -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the -ALWAYS_DETAILED_SEC = NO -INLINE_INHERITED_MEMB = NO -FULL_PATH_NAMES = YES -STRIP_FROM_PATH = /tmp/cdunn/www/jsoncpp/trunk/jsoncpp -STRIP_FROM_INC_PATH = /tmp/cdunn/www/jsoncpp/trunk/jsoncpp/include -SHORT_NAMES = NO -JAVADOC_AUTOBRIEF = NO -MULTILINE_CPP_IS_BRIEF = NO -DETAILS_AT_TOP = NO -INHERIT_DOCS = YES -DISTRIBUTE_GROUP_DOC = NO -SEPARATE_MEMBER_PAGES = NO -TAB_SIZE = 3 -ALIASES = -OPTIMIZE_OUTPUT_FOR_C = NO -OPTIMIZE_OUTPUT_JAVA = NO -SUBGROUPING = YES -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- -EXTRACT_ALL = YES -EXTRACT_PRIVATE = NO -EXTRACT_STATIC = YES -EXTRACT_LOCAL_CLASSES = NO -EXTRACT_LOCAL_METHODS = NO -HIDE_UNDOC_MEMBERS = NO -HIDE_UNDOC_CLASSES = NO -HIDE_FRIEND_COMPOUNDS = NO -HIDE_IN_BODY_DOCS = NO -INTERNAL_DOCS = YES -CASE_SENSE_NAMES = NO -HIDE_SCOPE_NAMES = NO -SHOW_INCLUDE_FILES = YES -INLINE_INFO = YES -SORT_MEMBER_DOCS = YES -SORT_BRIEF_DOCS = NO -SORT_BY_SCOPE_NAME = NO -GENERATE_TODOLIST = YES -GENERATE_TESTLIST = YES -GENERATE_BUGLIST = YES -GENERATE_DEPRECATEDLIST= YES -ENABLED_SECTIONS = -MAX_INITIALIZER_LINES = 30 -SHOW_USED_FILES = YES -SHOW_DIRECTORIES = YES -FILE_VERSION_FILTER = -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- -QUIET = NO -WARNINGS = YES -WARN_IF_UNDOCUMENTED = YES -WARN_IF_DOC_ERROR = YES -WARN_NO_PARAMDOC = NO -WARN_FORMAT = "$file:$line: $text" -WARN_LOGFILE = jsoncpp-doxygen-warning.log -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- -INPUT = ../include ../src/lib_json . 
-FILE_PATTERNS = *.h *.cpp *.dox -RECURSIVE = YES -EXCLUDE = -EXCLUDE_SYMLINKS = NO -EXCLUDE_PATTERNS = -EXAMPLE_PATH = -EXAMPLE_PATTERNS = * -EXAMPLE_RECURSIVE = NO -IMAGE_PATH = -INPUT_FILTER = -FILTER_PATTERNS = -FILTER_SOURCE_FILES = NO -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- -SOURCE_BROWSER = YES -INLINE_SOURCES = NO -STRIP_CODE_COMMENTS = YES -REFERENCED_BY_RELATION = YES -REFERENCES_RELATION = YES -USE_HTAGS = NO -VERBATIM_HEADERS = YES -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- -ALPHABETICAL_INDEX = NO -COLS_IN_ALPHA_INDEX = 5 -IGNORE_PREFIX = -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- -GENERATE_HTML = YES -HTML_OUTPUT = json-html-doc-0.1 -HTML_FILE_EXTENSION = .html -HTML_HEADER = header.html -HTML_FOOTER = footer.html -HTML_STYLESHEET = -HTML_ALIGN_MEMBERS = YES -GENERATE_HTMLHELP = NO -CHM_FILE = jsoncpp.chm -HHC_LOCATION = -GENERATE_CHI = NO -BINARY_TOC = NO -TOC_EXPAND = NO -DISABLE_INDEX = NO -ENUM_VALUES_PER_LINE = 4 -GENERATE_TREEVIEW = NO -TREEVIEW_WIDTH = 250 -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- -GENERATE_LATEX = NO -LATEX_OUTPUT = latex -LATEX_CMD_NAME = latex -MAKEINDEX_CMD_NAME = makeindex -COMPACT_LATEX = NO -PAPER_TYPE = a4wide -EXTRA_PACKAGES = -LATEX_HEADER = -PDF_HYPERLINKS = NO -USE_PDFLATEX = NO -LATEX_BATCHMODE = NO -LATEX_HIDE_INDICES = NO -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- -GENERATE_RTF = NO -RTF_OUTPUT = rtf -COMPACT_RTF = NO -RTF_HYPERLINKS = NO -RTF_STYLESHEET_FILE = -RTF_EXTENSIONS_FILE = -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- -GENERATE_MAN = NO -MAN_OUTPUT = man -MAN_EXTENSION = .3 -MAN_LINKS = NO -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- -GENERATE_XML = NO -XML_OUTPUT = xml -XML_SCHEMA = -XML_DTD = -XML_PROGRAMLISTING = YES -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- -GENERATE_AUTOGEN_DEF = NO -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- -GENERATE_PERLMOD = NO -PERLMOD_LATEX = NO -PERLMOD_PRETTY = YES -PERLMOD_MAKEVAR_PREFIX = -#--------------------------------------------------------------------------- -# Configuration options related to the 
preprocessor -#--------------------------------------------------------------------------- -ENABLE_PREPROCESSING = YES -MACRO_EXPANSION = NO -EXPAND_ONLY_PREDEF = NO -SEARCH_INCLUDES = YES -INCLUDE_PATH = ../include -INCLUDE_FILE_PATTERNS = *.h -PREDEFINED = JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP -EXPAND_AS_DEFINED = -SKIP_FUNCTION_MACROS = YES -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- -TAGFILES = -GENERATE_TAGFILE = -ALLEXTERNALS = NO -EXTERNAL_GROUPS = YES -PERL_PATH = /usr/bin/perl -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- -CLASS_DIAGRAMS = NO -HIDE_UNDOC_RELATIONS = YES -HAVE_DOT = NO -CLASS_GRAPH = YES -COLLABORATION_GRAPH = YES -GROUP_GRAPHS = YES -UML_LOOK = NO -TEMPLATE_RELATIONS = NO -INCLUDE_GRAPH = YES -INCLUDED_BY_GRAPH = YES -CALL_GRAPH = NO -GRAPHICAL_HIERARCHY = YES -DIRECTORY_GRAPH = YES -DOT_IMAGE_FORMAT = png -DOT_PATH = -DOTFILE_DIRS = -MAX_DOT_GRAPH_WIDTH = 1024 -MAX_DOT_GRAPH_HEIGHT = 1024 -MAX_DOT_GRAPH_DEPTH = 1000 -DOT_TRANSPARENT = NO -DOT_MULTI_TARGETS = NO -GENERATE_LEGEND = YES -DOT_CLEANUP = YES -#--------------------------------------------------------------------------- -# Configuration::additions related to the search engine -#--------------------------------------------------------------------------- -SEARCHENGINE = NO From a95825ffae61ccb3295bcb2f63bf1267b4e146e1 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 14 Jun 2007 22:07:15 +0000 Subject: [PATCH 060/268] Unneeded comment. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@60 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 5 ----- 1 file changed, 5 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index 8167484..a957617 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -183,8 +183,3 @@ buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) buildProjectInDirectory( 'doc' ) -# libs was happening before bin by chance, I think. When I added -# the compiler version to linux-gcc, the order changed. This -# fixes it (I believe). -env.Depends('bin', 'libs') -env.Depends('check', 'bin') From 152e2e47ccdec034caddfb1b393c35a7210e968a Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 19 Jan 2008 12:12:35 +0000 Subject: [PATCH 061/268] Added rought roadmap. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@61 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/roadmap.dox | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 trunk/jsoncpp/doc/roadmap.dox diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox new file mode 100644 index 0000000..2a0dcc6 --- /dev/null +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -0,0 +1,33 @@ +/*! 
\page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Fix doxygen build issue (discard doxygen dependency check, always rebuild) + - Add enable/disable flag for static and shared library build + - Enhance help + - Test with recent Scons checkpoint + - Platform portability check: + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + - Release on sourceforge download + \section ms_unicode Clean-up unicode handling + - Ensure reader properly convert \u and \U unicode sequence to UTF8 + - Ensure writer emit only UTF8 string. + - Provides hook to convert string to/from utf8/other encoding. + - look into iconv, icu and windows API + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ From 1fe77012adf3410dfec0dec638c1a859ad488b8e Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 20 Jan 2008 16:49:53 +0000 Subject: [PATCH 062/268] - rewrote doxygen documentation generation integration with Scons (still need some clean-up): list of sources is explicitly passed to a doxyfile builder which is used as input of a doxygen builder. Hence, the doxyfile depends on all the sources. - documentation is now correctly generated once when source are changed on the first scons run. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@62 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 23 ++- trunk/jsoncpp/doc/roadmap.dox | 2 + trunk/jsoncpp/doc/sconscript | 76 +++++--- trunk/jsoncpp/scons-tools/doxygen.py | 269 +++++++++------------------ trunk/jsoncpp/scons-tools/glob.py | 53 ++++++ 5 files changed, 211 insertions(+), 212 deletions(-) create mode 100644 trunk/jsoncpp/scons-tools/glob.py diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index a957617..c714f39 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -15,15 +15,15 @@ options.Add( EnumOption('platform', try: platform = ARGUMENTS['platform'] if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH + CXX = 'g++' # not quite right, but env is not yet available. 
+ import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH except KeyError: print 'You must specify a "platform"' sys.exit(2) @@ -95,6 +95,7 @@ env.Tool('doxygen') env.Tool('substinfile') env.Tool('targz') env.Tool('srcdist') +env.Tool('glob') env.Append( CPPPATH = ['#include'], LIBPATH = lib_dir ) @@ -165,9 +166,6 @@ def runJSONTests_action( target, source = None, env = None ): def runJSONTests_string( target, source = None, env = None ): return 'RunJSONTests("%s")' % source -##def buildDoc( doxyfile_path ): -## doc_cmd = env.Doxygen( doxyfile_path ) - import SCons.Action ActionFactory = SCons.Action.ActionFactory RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) @@ -182,4 +180,5 @@ env.Alias( 'src-dist', srcdist_cmd ) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) buildProjectInDirectory( 'doc' ) +#print env.Dump() diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox index 2a0dcc6..84648b9 100644 --- a/trunk/jsoncpp/doc/roadmap.dox +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -21,6 +21,8 @@ - look into iconv, icu and windows API \section ms_strict Adds a strict mode to reader/parser Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support \section ms_separation Expose json reader/writer API that do not impose using Json::Value. Some typical use-case involve an application specific structure to/from a JSON document. - Performance oriented parser/writer: diff --git a/trunk/jsoncpp/doc/sconscript b/trunk/jsoncpp/doc/sconscript index 3e4205c..62b481e 100644 --- a/trunk/jsoncpp/doc/sconscript +++ b/trunk/jsoncpp/doc/sconscript @@ -1,26 +1,60 @@ Import( 'env' ) import os.path -if 'doxygen' in env['TOOLS']: - doc_topdir = env['ROOTBUILD_DIR'] - doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', - SUBST_DICT = { - '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'], - '%TOPDIR%' : env.Dir('#').abspath, - '%DOC_TOPDIR%' : str(doc_topdir) } ) - doc_cmd = env.Doxygen( doxyfile ) - alias_doc_cmd = env.Alias('doc', doc_cmd ) - env.AlwaysBuild(alias_doc_cmd) +if 'doxygen' in env['TOOLS']: + doc_topdir = str(env['ROOTBUILD_DIR']) + html_dir = 'jsoncpp-api-doc' - for dir in doc_cmd: - env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) ) - filename = os.path.split(dir.path)[1] - targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) - zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], - TARGZ_BASEDIR = doc_topdir ) - env.Depends( zip_doc_cmd, alias_doc_cmd ) - env.Alias( 'doc-dist', zip_doc_cmd ) + doxygen_inputs = env.Glob( includes = '*.dox', dir = '#doc' ) \ + + env.Glob( includes = '*.h', dir = '#include/json/' ) \ + + env.Glob( includes = ('*.dox','*.h','*.inl','*.cpp'), + dir = '#src/lib_json' ) +## for p in doxygen_inputs: +## print p.abspath - # When doxyfile gets updated, I get errors on the first pass. - # I have to run scons twice. Something is wrong with the dependencies - # here, but I avoid it by running "scons doc/doxyfile" first. 
+ top_dir = env.Dir('#').abspath + include_top_dir = env.Dir('#include').abspath + env['DOXYFILE_DICT'] = { 'PROJECT_NAME': 'JsonCpp', + 'PROJECT_NUMBER': env['JSONCPP_VERSION'], + 'STRIP_FROM_PATH': top_dir, + 'STRIP_FROM_INC_PATH': include_top_dir, + 'HTML_OUTPUT': html_dir, + 'HTML_HEADER': env.File('#doc/header.html').abspath, + 'HTML_FOOTER': env.File('#doc/footer.html').abspath, + 'INCLUDE_PATH': include_top_dir, + 'PREDEFINED': 'JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP' + } + env['DOXYFILE_FILE'] = 'doxyfile.in' + doxfile_nodes = env.Doxyfile( os.path.join( doc_topdir, 'doxyfile' ), doxygen_inputs ) + html_doc_path = os.path.join( doc_topdir, html_dir ) + doc_nodes = env.Doxygen( source = doxfile_nodes, + target = os.path.join( html_doc_path, 'index.html' ) ) + alias_doc_cmd = env.Alias('doc', doc_nodes ) + env.Alias('doc', env.Install( html_doc_path, '#README.txt' ) ) + targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % html_dir ) + zip_doc_cmd = env.TarGz( targz_path, [env.Dir(html_doc_path)], + TARGZ_BASEDIR = env['ROOTBUILD_DIR'] ) + env.Depends( zip_doc_cmd, alias_doc_cmd ) + env.Alias( 'doc-dist', zip_doc_cmd ) +## +## doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', +## SUBST_DICT = { +## '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'], +## '%TOPDIR%' : env.Dir('#').abspath, +## '%DOC_TOPDIR%' : str(doc_topdir) } ) +## doc_cmd = env.Doxygen( doxyfile ) +## alias_doc_cmd = env.Alias('doc', doc_cmd ) +## env.AlwaysBuild(alias_doc_cmd) +## +## for dir in doc_cmd: +## env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) ) +## filename = os.path.split(dir.path)[1] +## targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) +## zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], +## TARGZ_BASEDIR = doc_topdir ) +## env.Depends( zip_doc_cmd, alias_doc_cmd ) +## env.Alias( 'doc-dist', zip_doc_cmd ) +## +## # When doxyfile gets updated, I get errors on the first pass. +## # I have to run scons twice. Something is wrong with the dependencies +## # here, but I avoid it by running "scons doc/doxyfile" first. diff --git a/trunk/jsoncpp/scons-tools/doxygen.py b/trunk/jsoncpp/scons-tools/doxygen.py index 7b8dee4..a03314e 100644 --- a/trunk/jsoncpp/scons-tools/doxygen.py +++ b/trunk/jsoncpp/scons-tools/doxygen.py @@ -2,201 +2,112 @@ # emitter depends on doxyfile which is generated from doxyfile.in. # build fails after cleaning and relaunching the build. +# Todo: +# Add helper function to environment like for glob +# Easier passage of header/footer +# Automatic deduction of index.html path based on custom parameters passed to doxyfile + import os import os.path import glob from fnmatch import fnmatch +import SCons -def DoxyfileParse(file_contents): - """ - Parse a Doxygen source file and return a dictionary of all the values. - Values will be strings and lists of strings. 
+def Doxyfile_emitter(target, source, env): """ - data = {} - - import shlex - lex = shlex.shlex(instream = file_contents, posix = True) - lex.wordchars += "*+./-:" - lex.whitespace = lex.whitespace.replace("\n", "") - lex.escape = "" - - lineno = lex.lineno - last_backslash_lineno = lineno - token = lex.get_token() - key = token # the first token should be a key - last_token = "" - key_token = False - next_key = False - new_data = True - - def append_data(data, key, new_data, token): - if new_data or len(data[key]) == 0: - data[key].append(token) - else: - data[key][-1] += token - - while token: - if token in ['\n']: - if last_token not in ['\\']: - key_token = True - elif token in ['\\']: - pass - elif key_token: - key = token - key_token = False - else: - if token == "+=": - if not data.has_key(key): - data[key] = list() - elif token == "=": - data[key] = list() - else: - append_data( data, key, new_data, token ) - new_data = True - - last_token = token - token = lex.get_token() - - if last_token == '\\' and token != '\n': - new_data = False - append_data( data, key, new_data, '\\' ) - - # compress lists of len 1 into single strings - for (k, v) in data.items(): - if len(v) == 0: - data.pop(k) + Modify the target and source lists to use the defaults if nothing + else has been specified. - # items in the following list will be kept as lists and not converted to strings - if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: - continue - - if len(v) == 1: - data[k] = v[0] - - return data - -def DoxySourceScan(node, env, path): - """ - Doxygen Doxyfile source scanner. This should scan the Doxygen file and add - any files used to generate docs to the list of source files. + Dependencies on external HTML documentation references are also + appended to the source list. 
""" - default_file_patterns = [ - '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', - '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', - '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', - '*.py', - ] - - default_exclude_patterns = [ - '*~', - ] - - sources = [] - - data = DoxyfileParse(node.get_contents()) - - if data.get("RECURSIVE", "NO") == "YES": - recursive = True - else: - recursive = False + doxyfile_template = env.File(env['DOXYFILE_FILE']) + source.insert(0, doxyfile_template) - file_patterns = data.get("FILE_PATTERNS", default_file_patterns) - exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) + return target, source - doxyfile_dir = str( node.dir ) +def Doxyfile_Builder(target, source, env): + """Input: + DOXYFILE_FILE + Path of the template file for the output doxyfile -## print 'running from', os.getcwd() - for node in data.get("INPUT", []): - node_real_path = os.path.normpath( os.path.join( doxyfile_dir, node ) ) - if os.path.isfile(node_real_path): -## print str(node), 'is a file' - sources.append(node) - elif os.path.isdir(node_real_path): -## print str(node), 'is a directory' - if recursive: - for root, dirs, files in os.walk(node): - for f in files: - filename = os.path.join(root, f) - - pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) - exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) - - if pattern_check and not exclude_check: - sources.append(filename) -## print ' adding source', os.path.abspath( filename ) - else: - for pattern in file_patterns: - sources.extend(glob.glob(os.path.join( node, pattern))) -## else: -## print str(node), 'is neither a file nor a directory' - sources = map( lambda path: env.File(path), sources ) - return sources - - -def DoxySourceScanCheck(node, env): - """Check if we should scan this file""" - return os.path.isfile(node.path) - -def DoxyEmitter(source, target, env): - """Doxygen Doxyfile emitter""" - # possible output formats and their default values and output locations - output_formats = { - "HTML": ("YES", "html"), - "LATEX": ("YES", "latex"), - "RTF": ("NO", "rtf"), - "MAN": ("YES", "man"), - "XML": ("NO", "xml"), - } - -## print '#### DoxyEmitter:', source[0].abspath, os.path.exists( source[0].abspath ) - data = DoxyfileParse(source[0].get_contents()) - - targets = [] - out_dir = data.get("OUTPUT_DIRECTORY", ".") - - # add our output locations - for (k, v) in output_formats.items(): - if data.get("GENERATE_" + k, v[0]) == "YES": - targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) - - # don't clobber targets - for node in targets: - env.Precious(node) - - # set up cleaning stuff - for node in targets: - clean_cmd = env.Clean(node, node) - env.Depends( clean_cmd, source ) - - return (targets, source) + DOXYFILE_DICT + A dictionnary of parameter to append to the generated doxyfile + """ + subdir = os.path.split(source[0].abspath)[0] + doc_top_dir = os.path.split(target[0].abspath)[0] + doxyfile_path = source[0].abspath + doxy_file = file( target[0].abspath, 'wt' ) + try: + # First, output the template file + try: + f = file(doxyfile_path, 'rt') + doxy_file.write( f.read() ) + f.close() + doxy_file.write( '\n' ) + doxy_file.write( '# Generated content:\n' ) + except: + raise SCons.Errors.UserError, "Can't read doxygen template file '%s'" % doxyfile_path + # Then, the input files + doxy_file.write( 'INPUT = \\\n' ) + for source in source: + if 
source.abspath != doxyfile_path: # skip doxyfile path, which is the first source + doxy_file.write( '"%s" \\\n' % source.abspath ) + doxy_file.write( '\n' ) + # Dot... + values_dict = { 'HAVE_DOT': env.get('DOT') and 'YES' or 'NO', + 'DOT_PATH': env.get('DOT') and os.path.split(env['DOT'])[0] or '', + 'OUTPUT_DIRECTORY': doc_top_dir, + 'WARN_LOGFILE': target[0].abspath + '-warning.log'} + values_dict.update( env['DOXYFILE_DICT'] ) + # Finally, output user dictionary values which override any of the previously set parameters. + for key, value in values_dict.iteritems(): + doxy_file.write ('%s = "%s"\n' % (key, str(value))) + finally: + doxy_file.close() def generate(env): """ Add builders and construction variables for the - Doxygen tool. This is currently for Doxygen 1.4.6. + Doxygen tool. """ - doxyfile_scanner = env.Scanner( - DoxySourceScan, - "DoxySourceScan", - scan_check = DoxySourceScanCheck, - ) - - doxyfile_builder = env.Builder( - action = env.Action("cd ${SOURCE.dir} && ${DOXYGEN} ${SOURCE.file}", - varlist=['$SOURCES']), - emitter = DoxyEmitter, - target_factory = env.fs.Entry, - single_source = True, - source_scanner = doxyfile_scanner, - ) - - env.Append(BUILDERS = { - 'Doxygen': doxyfile_builder, - }) - - env.AppendUnique( - DOXYGEN = 'doxygen', - ) + ## Doxyfile builder + def doxyfile_message (target, source, env): + return "creating Doxygen config file '%s'" % target[0] + + doxyfile_variables = [ + 'DOXYFILE_DICT', + 'DOXYFILE_FILE' + ] + + doxyfile_action = SCons.Action.Action( Doxyfile_Builder, doxyfile_message, + doxyfile_variables ) + + doxyfile_builder = SCons.Builder.Builder( action = doxyfile_action, + emitter = Doxyfile_emitter ) + + env['BUILDERS']['Doxyfile'] = doxyfile_builder + env['DOXYFILE_DICT'] = {} + env['DOXYFILE_FILE'] = 'doxyfile.in' + + ## Doxygen builder + def Doxygen_emitter(target, source, env): + output_dir = str( source[0].dir ) + if str(target[0]) == str(source[0]): + target = env.File( os.path.join( output_dir, 'html', 'index.html' ) ) + return target, source + + doxygen_action = SCons.Action.Action( [ '$DOXYGEN_COM'] ) + doxygen_builder = SCons.Builder.Builder( action = doxygen_action, + emitter = Doxygen_emitter ) + env['BUILDERS']['Doxygen'] = doxygen_builder + env['DOXYGEN_COM'] = '$DOXYGEN $DOXYGEN_FLAGS $SOURCE' + env['DOXYGEN_FLAGS'] = '' + env['DOXYGEN'] = 'doxygen' + + dot_path = env.WhereIs("dot") + if dot_path: + env['DOT'] = dot_path def exists(env): """ diff --git a/trunk/jsoncpp/scons-tools/glob.py b/trunk/jsoncpp/scons-tools/glob.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/trunk/jsoncpp/scons-tools/glob.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. 
+ + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True From 53caf66b1a4ddd5323f7d798010b08725ec66eb4 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 20 Jan 2008 16:55:26 +0000 Subject: [PATCH 063/268] Fixed compilation issue on windows (avoid using cstring and use string.h instead). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@63 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 500b322..33b5cbc 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -2,6 +2,7 @@ #include #include #include +#include #include #if _MSC_VER >= 1400 // VC++ 8.0 @@ -66,7 +67,7 @@ std::string valueToString( bool value ) std::string valueToQuotedString( const char *value ) { // Not sure how to handle unicode... - if (std::strpbrk(value, "\"\\\b\f\n\r\t") == NULL) + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL) return std::string("\"") + value + "\""; // We have to walk value and escape any special characters. // Appending to std::string is not efficient, but this should be rare. From 446398665ab55cc79e7ac86ebadc7b5b5c68157c Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 21 Jan 2008 08:37:06 +0000 Subject: [PATCH 064/268] Sketched out some idea to make the build system more reusable across projects. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@64 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 71 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index c714f39..9542696 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -1,3 +1,74 @@ +""" +Build system can be clean-up by sticking to a few core production factory, with automatic dependencies resolution. 
+4 basic project productions: +- library +- binary +- documentation +- tests + +* Library: + Input: + - dependencies (other libraries) + - headers: include path & files + - sources + - generated sources + - resources + - generated resources + Production: + - Static library + - Dynamic library + - Naming rule + Life-cycle: + - Library compilation + - Compilation as a dependencies + - Run-time + - Packaging + Identity: + - Name + - Version +* Binary: + Input: + - dependencies (other libraries) + - headers: include path & files (usually empty) + - sources + - generated sources + - resources + - generated resources + - supported variant (optimized/debug, dll/static...) + Production: + - Binary executable + - Manifest [on some platforms] + - Debug symbol [on some platforms] + Life-cycle: + - Compilation + - Run-time + - Packaging + Identity: + - Name + - Version +* Documentation: + Input: + - dependencies (libraries, binaries) + - additional sources + - generated sources + - resources + - generated resources + - supported variant (public/internal) + Production: + - HTML documentation + - PDF documentation + - CHM documentation + Life-cycle: + - Documentation + - Packaging + - Test + Identity: + - Name + - Version +""" + + + import os import os.path import sys From fd7cf8de7d74e549a6ea9d8c3b8f1beb6d8fc5c0 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 24 Jan 2008 20:09:48 +0000 Subject: [PATCH 065/268] Changed valueToString for floats so that zeroes after a decimal are truncated -- saves file size esp. for StyledStreamWriter which uses tabs instead of spaces. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@65 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 28 ++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 33b5cbc..4129984 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -55,6 +55,34 @@ std::string valueToString( double value ) #else sprintf(buffer, "%.16g", value); #endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } return buffer; } From 1997298d54835780fe6a3e888897c844415dfdf9 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Thu, 24 Jan 2008 20:21:11 +0000 Subject: [PATCH 066/268] Forgot to add # to %g modifier for sprintf in valueToString for floats. Otherwise no decimal point appears when only zeroes would follow, which changes the type to integer. 
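
In other words, without the '#' flag %g strips trailing zeros and the decimal point itself, so a whole-valued double is emitted looking like an integer. A small sketch of the difference (Python's %-formatting follows C's printf for these conversions; the values are only illustrative):

'%.16g'  % 17.0   # -> '17'                  no decimal point: re-read as an integer
'%#.16g' % 17.0   # -> '17.00000000000000'   decimal point kept, value stays a real
'%#.16g' % 1.25   # -> '1.250000000000000'   trailing zeros kept, which the
                  #    truncation from the previous patch then shortens
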
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@66 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 4129984..9f2145a 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -51,9 +51,9 @@ std::string valueToString( double value ) { char buffer[32]; #ifdef __STDC_SECURE_LIB__ // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%.16g", value); + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); #else - sprintf(buffer, "%.16g", value); + sprintf(buffer, "%#.16g", value); #endif char* ch = buffer + strlen(buffer) - 1; if (*ch != '0') return buffer; // nothing to truncate, so save time From e5332e3d825f368016bc30cf90ef8434fa22a867 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 26 Sep 2008 07:15:11 +0000 Subject: [PATCH 067/268] - Added basic string test (all char in ASCII range 33-126) and a large string test. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@67 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/test_string_01.expected | 1 + trunk/jsoncpp/test/test_string_01.json | 1 + trunk/jsoncpp/test/test_string_02.expected | 1 + trunk/jsoncpp/test/test_string_02.json | 1 + 4 files changed, 4 insertions(+) create mode 100644 trunk/jsoncpp/test/test_string_01.expected create mode 100644 trunk/jsoncpp/test/test_string_01.json create mode 100644 trunk/jsoncpp/test/test_string_02.expected create mode 100644 trunk/jsoncpp/test/test_string_02.json diff --git a/trunk/jsoncpp/test/test_string_01.expected b/trunk/jsoncpp/test/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/trunk/jsoncpp/test/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/trunk/jsoncpp/test/test_string_01.json b/trunk/jsoncpp/test/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/trunk/jsoncpp/test/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/trunk/jsoncpp/test/test_string_02.expected b/trunk/jsoncpp/test/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/trunk/jsoncpp/test/test_string_02.expected @@ -0,0 +1 @@ 
+.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/trunk/jsoncpp/test/test_string_02.json b/trunk/jsoncpp/test/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/trunk/jsoncpp/test/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" From 1b19115e72430f8621d3b8679137863569a17680 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Mon, 11 May 2009 20:04:10 +0000 Subject: [PATCH 068/268] Added cstring for memcpy(), as suggested by sebastien.vincent@turnserver.org . This allows gcc-4 to compile. Removed doxygen from SConstruct, since it is now broken; I have been unable to get it to work with either scons 0.97 or scons 2.1; hopefully someone else can get that working again. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@68 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 2 +- trunk/jsoncpp/src/lib_json/json_reader.cpp | 5 +++-- trunk/jsoncpp/src/lib_json/json_value.cpp | 5 +++-- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index 9542696..2c216cc 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -250,6 +250,6 @@ env.Alias( 'src-dist', srcdist_cmd ) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'doc' ) +#buildProjectInDirectory( 'doc' ) # THIS IS BROKEN. 
#print env.Dump() diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 0ded777..9869686 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -1,8 +1,9 @@ #include #include #include -#include -#include +#include +#include +#include #include #include diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index a917772..734bf16 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -3,11 +3,12 @@ #include #include #include -#include "assert.h" +#include +#include #ifdef JSON_USE_CPPTL # include #endif -#include // size_t +#include // size_t #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR # include "json_batchallocator.h" #endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR From 868ecfe26e2b5b57e90e17732c557269a4b2a8d3 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Sun, 24 May 2009 22:22:08 +0000 Subject: [PATCH 069/268] Patch from Sebastien Vincent git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@69 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 10 +++++----- trunk/jsoncpp/scons-tools/doxygen.py | 5 +++-- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index 2c216cc..0043523 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -76,8 +76,8 @@ import sys JSONCPP_VERSION = '0.1' DIST_DIR = '#dist' -options = Options() -options.Add( EnumOption('platform', +options = Variables() +options.Add( EnumVariable('platform', 'Platform (compiler/stl) used to build the project', 'msvc71', allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), @@ -190,11 +190,11 @@ env['SRCDIST_ADD'] = SrcDistAdder( env ) env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) env['SRCDIST_BUILDER'] = env.TarGz -env_testing = env.Copy( ) +env_testing = env.Clone( ) env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) def buildJSONExample( env, target_sources, target_name ): - env = env.Copy() + env = env.Clone() env.Append( CPPPATH = ['#'] ) exe = env.Program( target=target_name, source=target_sources ) @@ -250,6 +250,6 @@ env.Alias( 'src-dist', srcdist_cmd ) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) -#buildProjectInDirectory( 'doc' ) # THIS IS BROKEN. +buildProjectInDirectory( 'doc' ) #print env.Dump() diff --git a/trunk/jsoncpp/scons-tools/doxygen.py b/trunk/jsoncpp/scons-tools/doxygen.py index a03314e..927fd29 100644 --- a/trunk/jsoncpp/scons-tools/doxygen.py +++ b/trunk/jsoncpp/scons-tools/doxygen.py @@ -80,8 +80,9 @@ def doxyfile_message (target, source, env): 'DOXYFILE_FILE' ] - doxyfile_action = SCons.Action.Action( Doxyfile_Builder, doxyfile_message, - doxyfile_variables ) + #doxyfile_action = SCons.Action.Action( Doxyfile_Builder, doxyfile_message, + # doxyfile_variables ) + doxyfile_action = SCons.Action.Action( Doxyfile_Builder, doxyfile_message) doxyfile_builder = SCons.Builder.Builder( action = doxyfile_action, emitter = Doxyfile_emitter ) From 5a66857c792b90d7af0b6d14c21c717584742090 Mon Sep 17 00:00:00 2001 From: malays Date: Fri, 13 Nov 2009 04:21:14 +0000 Subject: [PATCH 070/268] Implementing support for reading and writing Unicode escape sequences. 
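As a hypothetical usage sketch (not part of the patch), the new decoding turns a \u escape -- including a surrogate pair -- into UTF-8 bytes in the resulting Value:

    #include <json/json.h>
    #include <cstdio>

    int main()
    {
       Json::Reader reader;
       Json::Value root;
       // U+1D11E (musical G clef), written as a UTF-16 surrogate pair.
       if ( reader.parse( "[\"\\uD834\\uDD1E\"]", root ) )
          printf( "utf-8 length: %u\n",
                  (unsigned)root[0u].asString().size() );   // expect 4 bytes
       return 0;
    }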
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@70 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/reader.h | 4 ++ trunk/jsoncpp/src/lib_json/json_reader.cpp | 70 ++++++++++++++++++- trunk/jsoncpp/src/lib_json/json_writer.cpp | 28 +++++++- .../test/test_string_unicode_01.expected | 1 + .../jsoncpp/test/test_string_unicode_01.json | 1 + .../test/test_string_unicode_02.expected | 1 + .../jsoncpp/test/test_string_unicode_02.json | 1 + .../test/test_string_unicode_03.expected | 1 + .../jsoncpp/test/test_string_unicode_03.json | 1 + .../test/test_string_unicode_04.expected | 1 + .../jsoncpp/test/test_string_unicode_04.json | 1 + 11 files changed, 105 insertions(+), 5 deletions(-) create mode 100644 trunk/jsoncpp/test/test_string_unicode_01.expected create mode 100644 trunk/jsoncpp/test/test_string_unicode_01.json create mode 100644 trunk/jsoncpp/test/test_string_unicode_02.expected create mode 100644 trunk/jsoncpp/test/test_string_unicode_02.json create mode 100644 trunk/jsoncpp/test/test_string_unicode_03.expected create mode 100644 trunk/jsoncpp/test/test_string_unicode_03.json create mode 100644 trunk/jsoncpp/test/test_string_unicode_04.expected create mode 100644 trunk/jsoncpp/test/test_string_unicode_04.json diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index f1bc5a2..e113569 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -115,6 +115,10 @@ namespace Json { bool decodeString( Token &token ); bool decodeString( Token &token, std::string &decoded ); bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); bool decodeUnicodeEscapeSequence( Token &token, Location ¤t, Location end, diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 9869686..0e0c2ff 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -36,6 +36,42 @@ containsNewLine( Reader::Location begin, return false; } +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + // Class Reader // ////////////////////////////////////////////////////////////////// @@ -577,10 +613,9 @@ Reader::decodeString( Token &token, std::string &decoded ) case 'u': { unsigned int unicode; - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) return false; - // @todo encode unicode as utf8. - // @todo remember to alter the writer too. 
+ decoded += codePointToUTF8(unicode); } break; default: @@ -595,6 +630,35 @@ Reader::decodeString( Token &token, std::string &decoded ) return true; } +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} bool Reader::decodeUnicodeEscapeSequence( Token &token, diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 9f2145a..111caac 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -4,6 +4,8 @@ #include #include #include +#include +#include #if _MSC_VER >= 1400 // VC++ 8.0 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. @@ -11,6 +13,20 @@ namespace Json { +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} static void uintToString( unsigned int value, char *¤t ) { @@ -95,7 +111,7 @@ std::string valueToString( bool value ) std::string valueToQuotedString( const char *value ) { // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL) + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) return std::string("\"") + value + "\""; // We have to walk value and escape any special characters. // Appending to std::string is not efficient, but this should be rare. @@ -132,8 +148,16 @@ std::string valueToQuotedString( const char *value ) // slash is also legal, so I see no reason to escape it. // (I hope I am not misunderstanding something.) 
default: - result += *c; + if ( isControlCharacter( *c ) ) + { + std::ostringstream oss; + oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast(*c); + result += oss.str(); + } + else + result += *c; } + break; } result += "\""; return result; diff --git a/trunk/jsoncpp/test/test_string_unicode_01.expected b/trunk/jsoncpp/test/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/trunk/jsoncpp/test/test_string_unicode_01.json b/trunk/jsoncpp/test/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/trunk/jsoncpp/test/test_string_unicode_02.expected b/trunk/jsoncpp/test/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/trunk/jsoncpp/test/test_string_unicode_02.json b/trunk/jsoncpp/test/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/trunk/jsoncpp/test/test_string_unicode_03.expected b/trunk/jsoncpp/test/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/trunk/jsoncpp/test/test_string_unicode_03.json b/trunk/jsoncpp/test/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/trunk/jsoncpp/test/test_string_unicode_04.expected b/trunk/jsoncpp/test/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/trunk/jsoncpp/test/test_string_unicode_04.json b/trunk/jsoncpp/test/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/trunk/jsoncpp/test/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file From 9fd21dd716dfa2943ff7739900cc54b28236f0cc Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 15 Nov 2009 22:49:54 +0000 Subject: [PATCH 071/268] Added patch for mingw from Sebastien Vincent git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@71 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 111caac..36983e0 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -66,7 +66,7 @@ std::string valueToString( Value::UInt value ) std::string valueToString( double value ) { char buffer[32]; -#ifdef __STDC_SECURE_LIB__ // Use secure version with visual studio 2005 to avoid warning. +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. 
sprintf_s(buffer, sizeof(buffer), "%#.16g", value); #else sprintf(buffer, "%#.16g", value); From b61a0da94b457c065aabfccdc5b50dc663d9a490 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 18 Nov 2009 17:01:09 +0000 Subject: [PATCH 072/268] Fixed bug introduced by utf-8 patch with fix provided by Henry Ludemann. All unit tests are now passing. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@72 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 80 ++++++++++++---------- 1 file changed, 44 insertions(+), 36 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 36983e0..e50c2dd 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -120,44 +120,52 @@ std::string valueToQuotedString( const char *value ) std::string result; result.reserve(maxsize); // to avoid lots of mallocs result += "\""; - for (const char* c=value; *c != 0; ++c){ - switch(*c){ + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something.) - default: - if ( isControlCharacter( *c ) ) - { - std::ostringstream oss; - oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast(*c); - result += oss.str(); - } - else - result += *c; + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. + // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; } - break; } result += "\""; return result; From 70851cac72b8951906c2166d19163f0902cf41c7 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 18 Nov 2009 17:12:24 +0000 Subject: [PATCH 073/268] Fixed default constructor initialization. 
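Taken together, the writer changes above give embedded control characters a defined escaped form. A standalone sketch of that behaviour (assuming valueToQuotedString() is declared in the public writer header, as in the original import):

    #include <json/json.h>
    #include <cstdio>
    #include <string>

    int main()
    {
       // '\t' has a dedicated escape; '\x07' falls through to the new \uXXXX path.
       std::string quoted = Json::valueToQuotedString( "tab:\t bell:\x07" );
       printf( "%s\n", quoted.c_str() );   // expected: "tab:\t bell:\u0007"
       return 0;
    }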
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@73 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_valueiterator.inl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index 8da3bf7..9c8e445 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -11,6 +11,11 @@ // ////////////////////////////////////////////////////////////////// ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_( 0 ) +#else +# error fix me // Need to handle uninitialized iterator comparison for experimental maps +#endif { } From 4fc186eeee56782fd322b7e17a565ea31dd44b55 Mon Sep 17 00:00:00 2001 From: malays Date: Wed, 18 Nov 2009 17:40:22 +0000 Subject: [PATCH 074/268] Fixing string index issue when checking for control characters git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@74 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index e50c2dd..f2f8311 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -20,7 +20,7 @@ static bool isControlCharacter(char ch) static bool containsControlCharacter( const char* str ) { - while ( str ) + while ( *str ) { if ( isControlCharacter( *(str++) ) ) return true; From ccc37556ac8dc826595f1c67e68b8125faeadf2f Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 18 Nov 2009 21:27:06 +0000 Subject: [PATCH 075/268] - fixed SCons build on Windows: only build static library (support static/dynamic at the same time requires significant changes) - renamed SCons glob tool to globtool to avoid clash with python glob module. This prevented running the tests. 
- check target now works with SCons 1.x git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@75 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 21 +++++-- trunk/jsoncpp/scons-tools/doxygen.py | 1 - .../scons-tools/{glob.py => globtool.py} | 0 trunk/jsoncpp/scons-tools/srcdist.py | 1 - trunk/jsoncpp/test/jsontestrunner.py | 6 +- trunk/jsoncpp/test/runjsontests.py | 57 ++++++++++++------- 6 files changed, 56 insertions(+), 30 deletions(-) rename trunk/jsoncpp/scons-tools/{glob.py => globtool.py} (100%) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index 0043523..de06710 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -137,24 +137,29 @@ elif platform == 'msvc6': for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -GX /nologo /MT' + env['SHARED_LIB_ENABLED'] = False elif platform == 'msvc70': env['MSVS_VERSION']='7.0' for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -GX /nologo /MT' + env['SHARED_LIB_ENABLED'] = False elif platform == 'msvc71': env['MSVS_VERSION']='7.1' for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -GX /nologo /MT' + env['SHARED_LIB_ENABLED'] = False elif platform == 'msvc80': env['MSVS_VERSION']='8.0' for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -EHsc /nologo /MT' + env['SHARED_LIB_ENABLED'] = False elif platform == 'mingw': env.Tool( 'mingw' ) env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) + env['SHARED_LIB_ENABLED'] = False elif platform.startswith('linux-gcc'): env.Tool( 'default' ) env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) @@ -166,13 +171,16 @@ env.Tool('doxygen') env.Tool('substinfile') env.Tool('targz') env.Tool('srcdist') -env.Tool('glob') +env.Tool('globtool') env.Append( CPPPATH = ['#include'], LIBPATH = lib_dir ) short_platform = platform if short_platform.startswith('msvc'): short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', True) env['LIB_PLATFORM'] = short_platform env['LIB_LINK_TYPE'] = 'lib' # static env['LIB_CRUNTIME'] = 'mt' @@ -210,11 +218,12 @@ def buildJSONTests( env, target_sources, target_name ): def buildLibrary( env, target_sources, target_name ): static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', source=target_sources ) - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) global lib_dir env.Install( lib_dir, static_lib ) - env.Install( lib_dir, shared_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) env['SRCDIST_ADD']( source=[target_sources] ) Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests' ) @@ -232,10 +241,10 @@ def runJSONTests_action( target, source = None, env = None ): jsontest_path = Dir( '#test' ).abspath sys.path.insert( 0, jsontest_path ) import runjsontests - return runjsontests.runAllTests( os.path.abspath(source), jsontest_path ) + return runjsontests.runAllTests( os.path.abspath(source[0].path), jsontest_path ) def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source + return 'RunJSONTests("%s")' % source[0] import SCons.Action ActionFactory = SCons.Action.ActionFactory diff --git a/trunk/jsoncpp/scons-tools/doxygen.py b/trunk/jsoncpp/scons-tools/doxygen.py index 927fd29..5ace420 100644 --- a/trunk/jsoncpp/scons-tools/doxygen.py +++ b/trunk/jsoncpp/scons-tools/doxygen.py @@ -9,7 +9,6 @@ import os import os.path -import glob from fnmatch import fnmatch import SCons diff --git a/trunk/jsoncpp/scons-tools/glob.py b/trunk/jsoncpp/scons-tools/globtool.py similarity index 100% rename from trunk/jsoncpp/scons-tools/glob.py rename to trunk/jsoncpp/scons-tools/globtool.py diff --git a/trunk/jsoncpp/scons-tools/srcdist.py b/trunk/jsoncpp/scons-tools/srcdist.py index 17f029f..3c6a151 100644 --- a/trunk/jsoncpp/scons-tools/srcdist.py +++ b/trunk/jsoncpp/scons-tools/srcdist.py @@ -1,6 +1,5 @@ import os import os.path -import glob from fnmatch import fnmatch import targz diff --git a/trunk/jsoncpp/test/jsontestrunner.py b/trunk/jsoncpp/test/jsontestrunner.py index a076d0c..504f3db 100644 --- a/trunk/jsoncpp/test/jsontestrunner.py +++ b/trunk/jsoncpp/test/jsontestrunner.py @@ -44,15 +44,15 @@ def valueTreeToString( fout, value, path = '.' ): assert False and "Unexpected value type" def parseAndSaveValueTree( input, actual_path ): - root = json.read( input ) + root = json.loads( input ) fout = file( actual_path, 'wt' ) valueTreeToString( fout, root ) fout.close() return root def rewriteValueTree( value, rewrite_path ): - rewrite = json.write( value ) - rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
file( rewrite_path, 'wt').write( rewrite + '\n' ) return rewrite diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index 38bfd6e..2cf37e2 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -1,7 +1,7 @@ import sys import os import os.path -import glob +from glob import glob def compareOutputs( expected, actual, message ): @@ -38,30 +38,49 @@ def safeReadFile( path ): def runAllTests( jsontest_executable_path, input_dir = None ): if not input_dir: input_dir = os.getcwd() - tests = glob.glob( os.path.join( input_dir, '*.json' ) ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + test_jsonchecker = glob( os.path.join( input_dir, 'jsonchecker', '*.json' ) ) failed_tests = [] - for input_path in tests: + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker print 'TESTING:', input_path, - pipe = os.popen( "%s %s" % (jsontest_executable_path, input_path) ) + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s %s %s" % (jsontest_executable_path, options, + input_path) ) process_output = pipe.read() status = pipe.close() - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed') ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, detail) ) + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) else: - print 'OK' + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' if failed_tests: print From 8d6618514a3d02bad6d9d0ada5d2bff77bce575e Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 18 Nov 2009 21:38:54 +0000 Subject: [PATCH 076/268] - added Features class that describes allowed extension for Reader, to allow for strict configuration - added tests from json.org jsonchecker and modified jsontestrunner to use strict parsing mode when executing them git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@76 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 3 + 
trunk/jsoncpp/include/json/features.h | 42 +++++++++++ trunk/jsoncpp/include/json/forwards.h | 3 + trunk/jsoncpp/include/json/json.h | 1 + trunk/jsoncpp/include/json/reader.h | 20 ++++-- trunk/jsoncpp/src/jsontestrunner/main.cpp | 82 +++++++++++++++++----- trunk/jsoncpp/src/lib_json/json_reader.cpp | 68 +++++++++++++++++- trunk/jsoncpp/src/lib_json/lib_json.vcproj | 3 + trunk/jsoncpp/test/jsonchecker/fail1.json | 1 + trunk/jsoncpp/test/jsonchecker/fail10.json | 1 + trunk/jsoncpp/test/jsonchecker/fail11.json | 1 + trunk/jsoncpp/test/jsonchecker/fail12.json | 1 + trunk/jsoncpp/test/jsonchecker/fail13.json | 1 + trunk/jsoncpp/test/jsonchecker/fail14.json | 1 + trunk/jsoncpp/test/jsonchecker/fail15.json | 1 + trunk/jsoncpp/test/jsonchecker/fail16.json | 1 + trunk/jsoncpp/test/jsonchecker/fail17.json | 1 + trunk/jsoncpp/test/jsonchecker/fail18.json | 1 + trunk/jsoncpp/test/jsonchecker/fail19.json | 1 + trunk/jsoncpp/test/jsonchecker/fail2.json | 1 + trunk/jsoncpp/test/jsonchecker/fail20.json | 1 + trunk/jsoncpp/test/jsonchecker/fail21.json | 1 + trunk/jsoncpp/test/jsonchecker/fail22.json | 1 + trunk/jsoncpp/test/jsonchecker/fail23.json | 1 + trunk/jsoncpp/test/jsonchecker/fail24.json | 1 + trunk/jsoncpp/test/jsonchecker/fail25.json | 1 + trunk/jsoncpp/test/jsonchecker/fail26.json | 1 + trunk/jsoncpp/test/jsonchecker/fail27.json | 2 + trunk/jsoncpp/test/jsonchecker/fail28.json | 2 + trunk/jsoncpp/test/jsonchecker/fail29.json | 1 + trunk/jsoncpp/test/jsonchecker/fail3.json | 1 + trunk/jsoncpp/test/jsonchecker/fail30.json | 1 + trunk/jsoncpp/test/jsonchecker/fail31.json | 1 + trunk/jsoncpp/test/jsonchecker/fail32.json | 1 + trunk/jsoncpp/test/jsonchecker/fail33.json | 1 + trunk/jsoncpp/test/jsonchecker/fail4.json | 1 + trunk/jsoncpp/test/jsonchecker/fail5.json | 1 + trunk/jsoncpp/test/jsonchecker/fail6.json | 1 + trunk/jsoncpp/test/jsonchecker/fail7.json | 1 + trunk/jsoncpp/test/jsonchecker/fail8.json | 1 + trunk/jsoncpp/test/jsonchecker/fail9.json | 1 + trunk/jsoncpp/test/jsonchecker/pass1.json | 58 +++++++++++++++ trunk/jsoncpp/test/jsonchecker/pass2.json | 1 + trunk/jsoncpp/test/jsonchecker/pass3.json | 6 ++ trunk/jsoncpp/test/jsonchecker/readme.txt | 3 + trunk/jsoncpp/test/runjsontests.py | 9 ++- 46 files changed, 307 insertions(+), 27 deletions(-) create mode 100644 trunk/jsoncpp/include/json/features.h create mode 100644 trunk/jsoncpp/test/jsonchecker/fail1.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail10.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail11.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail12.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail13.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail14.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail15.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail16.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail17.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail18.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail19.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail2.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail20.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail21.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail22.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail23.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail24.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail25.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail26.json create mode 100644 
trunk/jsoncpp/test/jsonchecker/fail27.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail28.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail29.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail3.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail30.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail31.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail32.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail33.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail4.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail5.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail6.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail7.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail8.json create mode 100644 trunk/jsoncpp/test/jsonchecker/fail9.json create mode 100644 trunk/jsoncpp/test/jsonchecker/pass1.json create mode 100644 trunk/jsoncpp/test/jsonchecker/pass2.json create mode 100644 trunk/jsoncpp/test/jsonchecker/pass3.json create mode 100644 trunk/jsoncpp/test/jsonchecker/readme.txt diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index f3aaa8e..5651d37 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -42,3 +42,6 @@ and TARGET may be: doc: build documentation doc-dist: build documentation tarball +To run the test manually: +cd test +python runjsontests.py "path to jsontest.exe" diff --git a/trunk/jsoncpp/include/json/features.h b/trunk/jsoncpp/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/trunk/jsoncpp/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. 
+ bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index 3372a55..ee76071 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -9,6 +9,9 @@ namespace Json { class Reader; class StyledWriter; + // features.h + class Features; + // value.h class StaticString; class Path; diff --git a/trunk/jsoncpp/include/json/json.h b/trunk/jsoncpp/include/json/json.h index a539740..c71ed65 100644 --- a/trunk/jsoncpp/include/json/json.h +++ b/trunk/jsoncpp/include/json/json.h @@ -5,5 +5,6 @@ # include "value.h" # include "reader.h" # include "writer.h" +# include "features.h" #endif // JSON_JSON_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index e113569..ee1d6a2 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -1,7 +1,7 @@ #ifndef CPPTL_JSON_READER_H_INCLUDED # define CPPTL_JSON_READER_H_INCLUDED -# include "forwards.h" +# include "features.h" # include "value.h" # include # include @@ -10,10 +10,7 @@ namespace Json { - class Value; - /** \brief Unserialize a JSON document into a Value. - * * */ class JSON_API Reader @@ -22,14 +19,24 @@ namespace Json { typedef char Char; typedef const Char *Location; + /** \brief Constructs a Reader allowing all features + * for parsing. + */ Reader(); + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + /** \brief Read a Value from a JSON document. * \param document UTF-8 encoded string containing the document to read. * \param root [out] Contains the root value of the document if it was * successfully parsed. * \param collectComments \c true to collect comment and allow writing them back during * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. * \return \c true if the document was successfully parsed, \c false if an error occurred. */ bool parse( const std::string &document, @@ -42,6 +49,8 @@ namespace Json { * successfully parsed. * \param collectComments \c true to collect comment and allow writing them back during * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. * \return \c true if the document was successfully parsed, \c false if an error occurred. */ bool parse( const char *beginDoc, const char *endDoc, @@ -50,7 +59,7 @@ namespace Json { /// \brief Parse from input stream. /// \see Json::operator>>(std::istream&, Json::Value&). 
- bool parse( std::istream&, + bool parse( std::istream &is, Value &root, bool collectComments = true ); @@ -152,6 +161,7 @@ namespace Json { Location lastValueEnd_; Value *lastValue_; std::string commentsBefore_; + Features features_; bool collectComments_; }; diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 88ed2f1..231ee0c 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -86,9 +86,11 @@ static int parseAndSaveValueTree( const std::string &input, const std::string &actual, const std::string &kind, - Json::Value &root ) + Json::Value &root, + const Json::Features &features, + bool parseOnly ) { - Json::Reader reader; + Json::Reader reader( features ); bool parsingSuccessful = reader.parse( input, root ); if ( !parsingSuccessful ) { @@ -98,14 +100,17 @@ parseAndSaveValueTree( const std::string &input, return 1; } - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) + if ( !parseOnly ) { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); } - printValueTree( factual, root ); - fclose( factual ); return 0; } @@ -143,25 +148,65 @@ removeSuffix( const std::string &path, return path.substr( 0, path.length() - extension.length() ); } +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + int main( int argc, const char *argv[] ) { - if ( argc != 2 ) + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) { - printf( "Usage: %s input-json-file", argv[0] ); - return 3; + return exitCode; } - std::string input = readInputTestFile( argv[1] ); + std::string input = readInputTestFile( path.c_str() ); if ( input.empty() ) { - printf( "Failed to read input or empty input: %s\n", argv[1] ); + printf( "Failed to read input or empty input: %s\n", path.c_str() ); return 3; } std::string basePath = removeSuffix( argv[1], ".json" ); - if ( basePath.empty() ) + if ( !parseOnly && basePath.empty() ) { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", argv[1] ); + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); return 3; } @@ -170,15 +215,16 @@ int main( int argc, const char *argv[] ) std::string rewriteActualPath = basePath + ".actual-rewrite"; Json::Value root; - int exitCode = parseAndSaveValueTree( input, actualPath, "input", root ); - if ( exitCode == 0 ) + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) { std::string rewrite; exitCode = rewriteValueTree( rewritePath, root, rewrite ); if ( exitCode == 0 ) { Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, "rewrite", rewriteRoot ); + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); } } diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 0e0c2ff..7addb3b 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -13,6 +13,36 @@ namespace Json { +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + static inline bool in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) { @@ -77,9 +107,17 @@ static std::string codePointToUTF8(unsigned int cp) // ////////////////////////////////////////////////////////////////// Reader::Reader() + : features_( Features::all() ) { } + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + bool Reader::parse( const std::string &document, Value &root, @@ -91,6 +129,7 @@ Reader::parse( const std::string &document, return parse( begin, end, root, collectComments ); } + bool Reader::parse( std::istream& sin, Value &root, @@ -113,6 +152,11 @@ Reader::parse( const char *beginDoc, const char *endDoc, Value &root, bool collectComments ) { + if ( !features_.allowComments_ ) + { + collectComments = false; + } + begin_ = beginDoc; end_ = endDoc; collectComments_ = collectComments; @@ -130,6 +174,19 @@ Reader::parse( const char *beginDoc, const char *endDoc, skipCommentTokens( token ); if ( collectComments_ && !commentsBefore_.empty() ) root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } return successful; } @@ -188,11 +245,18 @@ Reader::readValue() void Reader::skipCommentTokens( Token &token ) { - do + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else { readToken( token ); } - while ( token.type_ == tokenComment ); } diff --git a/trunk/jsoncpp/src/lib_json/lib_json.vcproj b/trunk/jsoncpp/src/lib_json/lib_json.vcproj index 3fa9cf0..aa47ffa 100644 --- a/trunk/jsoncpp/src/lib_json/lib_json.vcproj +++ b/trunk/jsoncpp/src/lib_json/lib_json.vcproj @@ -169,6 +169,9 @@ + + diff --git a/trunk/jsoncpp/test/jsonchecker/fail1.json 
b/trunk/jsoncpp/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail10.json b/trunk/jsoncpp/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail11.json b/trunk/jsoncpp/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail12.json b/trunk/jsoncpp/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail13.json b/trunk/jsoncpp/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail14.json b/trunk/jsoncpp/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail15.json b/trunk/jsoncpp/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail16.json b/trunk/jsoncpp/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail17.json b/trunk/jsoncpp/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail18.json b/trunk/jsoncpp/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail19.json b/trunk/jsoncpp/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail2.json b/trunk/jsoncpp/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail20.json b/trunk/jsoncpp/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ 
b/trunk/jsoncpp/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail21.json b/trunk/jsoncpp/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail22.json b/trunk/jsoncpp/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail23.json b/trunk/jsoncpp/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail24.json b/trunk/jsoncpp/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail25.json b/trunk/jsoncpp/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail26.json b/trunk/jsoncpp/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail27.json b/trunk/jsoncpp/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail28.json b/trunk/jsoncpp/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail29.json b/trunk/jsoncpp/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail3.json b/trunk/jsoncpp/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail30.json b/trunk/jsoncpp/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail31.json b/trunk/jsoncpp/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail32.json b/trunk/jsoncpp/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ 
b/trunk/jsoncpp/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail33.json b/trunk/jsoncpp/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail4.json b/trunk/jsoncpp/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail5.json b/trunk/jsoncpp/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail6.json b/trunk/jsoncpp/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail7.json b/trunk/jsoncpp/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail8.json b/trunk/jsoncpp/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/fail9.json b/trunk/jsoncpp/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/pass1.json b/trunk/jsoncpp/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" 
+: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/pass2.json b/trunk/jsoncpp/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/trunk/jsoncpp/test/jsonchecker/pass3.json b/trunk/jsoncpp/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/trunk/jsoncpp/test/jsonchecker/readme.txt b/trunk/jsoncpp/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/trunk/jsoncpp/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index 2cf37e2..fa85972 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -3,6 +3,7 @@ import os.path from glob import glob +RUN_JSONCHECKER = True def compareOutputs( expected, actual, message ): expected = expected.strip().replace('\r','').split('\n') @@ -39,7 +40,10 @@ def runAllTests( jsontest_executable_path, input_dir = None ): if not input_dir: input_dir = os.getcwd() tests = glob( os.path.join( input_dir, '*.json' ) ) - test_jsonchecker = glob( os.path.join( input_dir, 'jsonchecker', '*.json' ) ) + if RUN_JSONCHECKER: + test_jsonchecker = glob( os.path.join( input_dir, 'jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] failed_tests = [] for input_path in tests + test_jsonchecker: is_json_checker_test = input_path in test_jsonchecker @@ -54,7 +58,8 @@ def runAllTests( jsontest_executable_path, input_dir = None ): if expect_failure: if status is None: print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed') ) + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) else: print 'OK' else: From 0f872c9afaa0358b766dfa5d943076532beaaa92 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 18 Nov 2009 22:25:34 +0000 Subject: [PATCH 077/268] Fixed comment after value in object value signaled by Frederic Surleau. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@77 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 12 +++++++++--- trunk/jsoncpp/test/runjsontests.py | 2 +- trunk/jsoncpp/test/test_comment_01.expected | 8 ++++++++ trunk/jsoncpp/test/test_comment_01.json | 8 ++++++++ 4 files changed, 26 insertions(+), 4 deletions(-) create mode 100644 trunk/jsoncpp/test/test_comment_01.expected create mode 100644 trunk/jsoncpp/test/test_comment_01.json diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 7addb3b..4eb2d11 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -557,9 +557,15 @@ Reader::readArray( Token &tokenStart ) return recoverFromError( tokenArrayEnd ); Token token; - if ( !readToken( token ) - || ( token.type_ != tokenArraySeparator && - token.type_ != tokenArrayEnd ) ) + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) { return addErrorAndRecover( "Missing ',' or ']' in array declaration", token, diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index fa85972..5c8c7c7 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -3,7 +3,7 @@ import os.path from glob import glob -RUN_JSONCHECKER = True +RUN_JSONCHECKER = False def compareOutputs( expected, actual, message ): expected = expected.strip().replace('\r','').split('\n') diff --git a/trunk/jsoncpp/test/test_comment_01.expected b/trunk/jsoncpp/test/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/trunk/jsoncpp/test/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/trunk/jsoncpp/test/test_comment_01.json b/trunk/jsoncpp/test/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/trunk/jsoncpp/test/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} From ba0f49d343d8f413f11874c0dc8f695a7c7d0760 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 19 Nov 2009 12:07:58 +0000 Subject: [PATCH 078/268] - fixed build issue with mingw (bug 2834969) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@78 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index de06710..ae7b7e7 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -115,7 +115,18 @@ if not os.path.exists( sconsign_dir_path ): # Store all dependencies signature in a database SConsignFile( sconsign_path ) -env = Environment( ENV = {'PATH' : os.environ['PATH']}, +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), toolpath = ['scons-tools'], tools=[] ) #, tools=['default'] ) From 
c28cd7a6525ce4ac0328aa2cf225e092a3ffff59 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 19 Nov 2009 12:19:07 +0000 Subject: [PATCH 079/268] - fixed build issue on AIX. Build both shared and static library is now only activated on Linux. Building static library on other platforms. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@79 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index ae7b7e7..8728eff 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -148,32 +148,28 @@ elif platform == 'msvc6': for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -GX /nologo /MT' - env['SHARED_LIB_ENABLED'] = False elif platform == 'msvc70': env['MSVS_VERSION']='7.0' for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -GX /nologo /MT' - env['SHARED_LIB_ENABLED'] = False elif platform == 'msvc71': env['MSVS_VERSION']='7.1' for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -GX /nologo /MT' - env['SHARED_LIB_ENABLED'] = False elif platform == 'msvc80': env['MSVS_VERSION']='8.0' for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -EHsc /nologo /MT' - env['SHARED_LIB_ENABLED'] = False elif platform == 'mingw': env.Tool( 'mingw' ) env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) - env['SHARED_LIB_ENABLED'] = False elif platform.startswith('linux-gcc'): env.Tool( 'default' ) env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True else: print "UNSUPPORTED PLATFORM." env.Exit(1) @@ -191,7 +187,9 @@ if short_platform.startswith('msvc'): short_platform = short_platform[2:] # Notes: on Windows you need to rebuild the source for each variant # Build script does not support that yet so we only build static libraries. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', True) +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) env['LIB_PLATFORM'] = short_platform env['LIB_LINK_TYPE'] = 'lib' # static env['LIB_CRUNTIME'] = 'mt' From 0d5b7922e981bd670d0988583504a42cd4e667f3 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 19 Nov 2009 13:05:54 +0000 Subject: [PATCH 080/268] - fixed build issue on Solaris (wrong compilation flags for multi-threading) - fixed build issue on Linux redhat 3: python does not has tarfile module git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@80 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 21 ++++++++----- trunk/jsoncpp/doc/sconscript | 11 +++---- trunk/jsoncpp/scons-tools/srcdist.py | 7 +++-- trunk/jsoncpp/scons-tools/targz.py | 44 +++++++++++++++------------- 4 files changed, 48 insertions(+), 35 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index 8728eff..be142c3 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -134,7 +134,7 @@ if platform == 'suncc': env.Tool( 'sunc++' ) env.Tool( 'sunlink' ) env.Tool( 'sunar' ) - env.Append( LIBS = ['pthreads'] ) + env.Append( CCFLAGS = ['-mt'] ) elif platform == 'vacpp': env.Tool( 'default' ) env.Tool( 'aixcc' ) @@ -198,14 +198,21 @@ env['JSONCPP_VERSION'] = JSONCPP_VERSION env['BUILD_DIR'] = env.Dir(build_dir) env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) env['DIST_DIR'] = DIST_DIR -class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass env['SRCDIST_ADD'] = SrcDistAdder( env ) env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) -env['SRCDIST_BUILDER'] = env.TarGz env_testing = env.Clone( ) env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) diff --git a/trunk/jsoncpp/doc/sconscript b/trunk/jsoncpp/doc/sconscript index 62b481e..7b78a21 100644 --- a/trunk/jsoncpp/doc/sconscript +++ b/trunk/jsoncpp/doc/sconscript @@ -31,11 +31,12 @@ if 'doxygen' in env['TOOLS']: target = os.path.join( html_doc_path, 'index.html' ) ) alias_doc_cmd = env.Alias('doc', doc_nodes ) env.Alias('doc', env.Install( html_doc_path, '#README.txt' ) ) - targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % html_dir ) - zip_doc_cmd = env.TarGz( targz_path, [env.Dir(html_doc_path)], - TARGZ_BASEDIR = env['ROOTBUILD_DIR'] ) - env.Depends( zip_doc_cmd, alias_doc_cmd ) - env.Alias( 'doc-dist', zip_doc_cmd ) + if 'TarGz' in env['BUILDERS']: + targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % html_dir ) + zip_doc_cmd = env.TarGz( targz_path, [env.Dir(html_doc_path)], + TARGZ_BASEDIR = env['ROOTBUILD_DIR'] ) + env.Depends( zip_doc_cmd, alias_doc_cmd ) + env.Alias( 'doc-dist', zip_doc_cmd ) ## ## doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', ## SUBST_DICT = { diff --git a/trunk/jsoncpp/scons-tools/srcdist.py b/trunk/jsoncpp/scons-tools/srcdist.py index 3c6a151..864ff40 100644 --- a/trunk/jsoncpp/scons-tools/srcdist.py +++ b/trunk/jsoncpp/scons-tools/srcdist.py @@ -167,12 +167,13 @@ def generate(env): ## scan_check = DoxySourceScanCheck, ## ) - srcdist_builder = targz.makeBuilder( 
srcDistEmitter ) + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) - env['BUILDERS']['SrcDist'] = srcdist_builder + env['BUILDERS']['SrcDist'] = srcdist_builder def exists(env): """ Make sure srcdist exists. """ - return True + return targz.exists(env) diff --git a/trunk/jsoncpp/scons-tools/targz.py b/trunk/jsoncpp/scons-tools/targz.py index a655b11..f543200 100644 --- a/trunk/jsoncpp/scons-tools/targz.py +++ b/trunk/jsoncpp/scons-tools/targz.py @@ -51,28 +51,32 @@ def visit(tar, dirname, names): tar.add(source_path, archive_name(source_path) ) # filename, arcname tar.close() -targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) -def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) -TarGzBuilder = makeBuilder() + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass -def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. def exists(env): return internal_targz From 8360d2aa06faac82ffb4cbf4e7fe166758717743 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 19 Nov 2009 13:29:25 +0000 Subject: [PATCH 081/268] Moved Visual Studio projects file under makefiles/vs71 to allow usage of multiple versions on the same source tree. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@81 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/makefiles/vs71/jsoncpp.sln | 9 +++++++-- .../vs71}/jsontest.vcproj | 2 +- .../lib_json => makefiles/vs71}/lib_json.vcproj | 14 +++++++------- 3 files changed, 15 insertions(+), 10 deletions(-) rename trunk/jsoncpp/{src/jsontestrunner => makefiles/vs71}/jsontest.vcproj (94%) rename trunk/jsoncpp/{src/lib_json => makefiles/vs71}/lib_json.vcproj (92%) diff --git a/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln b/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln index 613c267..eb1e81e 100644 --- a/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln +++ b/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln @@ -1,9 +1,9 @@ Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "..\..\src\lib_json\lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" ProjectSection(ProjectDependencies) = postProject EndProjectSection EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "..\..\src\jsontestrunner\jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" ProjectSection(ProjectDependencies) = postProject {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} EndProjectSection @@ -11,15 +11,20 @@ EndProject Global GlobalSection(SolutionConfiguration) = preSolution Debug = Debug + dummy = dummy Release = Release EndGlobalSection GlobalSection(ProjectConfiguration) = postSolution {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 EndGlobalSection diff --git a/trunk/jsoncpp/src/jsontestrunner/jsontest.vcproj b/trunk/jsoncpp/makefiles/vs71/jsontest.vcproj similarity index 94% rename from trunk/jsoncpp/src/jsontestrunner/jsontest.vcproj rename to trunk/jsoncpp/makefiles/vs71/jsontest.vcproj index f86b27e..99a4dd6 100644 --- a/trunk/jsoncpp/src/jsontestrunner/jsontest.vcproj +++ b/trunk/jsoncpp/makefiles/vs71/jsontest.vcproj @@ -111,7 +111,7 @@ + RelativePath="..\..\src\jsontestrunner\main.cpp"> diff --git a/trunk/jsoncpp/src/lib_json/lib_json.vcproj b/trunk/jsoncpp/makefiles/vs71/lib_json.vcproj similarity index 92% rename from trunk/jsoncpp/src/lib_json/lib_json.vcproj rename to trunk/jsoncpp/makefiles/vs71/lib_json.vcproj index aa47ffa..1aa5978 100644 --- a/trunk/jsoncpp/src/lib_json/lib_json.vcproj +++ b/trunk/jsoncpp/makefiles/vs71/lib_json.vcproj @@ -179,25 +179,25 @@ RelativePath="..\..\include\json\json.h"> + RelativePath="..\..\src\lib_json\json_batchallocator.h"> + 
RelativePath="..\..\src\lib_json\json_internalarray.inl"> + RelativePath="..\..\src\lib_json\json_internalmap.inl"> + RelativePath="..\..\src\lib_json\json_reader.cpp"> + RelativePath="..\..\src\lib_json\json_value.cpp"> + RelativePath="..\..\src\lib_json\json_valueiterator.inl"> + RelativePath="..\..\src\lib_json\json_writer.cpp"> From e882ed579c884aaba71a68da5baee4a4204a0fc7 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 19 Nov 2009 19:03:14 +0000 Subject: [PATCH 082/268] Fixed documentation example. Bounced version number. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@82 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 2 +- trunk/jsoncpp/doc/jsoncpp.dox | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index be142c3..a22d4c9 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -73,7 +73,7 @@ import os import os.path import sys -JSONCPP_VERSION = '0.1' +JSONCPP_VERSION = '0.2' DIST_DIR = '#dist' options = Variables() diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 34dda5e..fc7b530 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -22,7 +22,7 @@ Here is an example of JSON data: ], // Tab indent size - indent : { length : 3, use_space = true } + "indent" : { "length" : 3, "use_space" = true } } \endverbatim From 1c0194821ca1dd01b3bca7309d0b9836d18634c9 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 19 Nov 2009 19:13:18 +0000 Subject: [PATCH 083/268] Fixed compilation issue with vs2005 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@83 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_valueiterator.inl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index 9c8e445..fdc52f6 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -12,7 +12,7 @@ ValueIteratorBase::ValueIteratorBase() #ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_( 0 ) + : current_() #else # error fix me // Need to handle uninitialized iterator comparison for experimental maps #endif From b962655ca75b66d4372e72b882e2d97d9ffbf63e Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 19 Nov 2009 20:16:59 +0000 Subject: [PATCH 084/268] Added support for valgrind memory checks git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@84 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/runjsontests.py | 39 +++++++++++++++++++++--------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index 5c8c7c7..eba84b1 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -2,8 +2,10 @@ import os import os.path from glob import glob +import optparse RUN_JSONCHECKER = False +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' def compareOutputs( expected, actual, message ): expected = expected.strip().replace('\r','').split('\n') @@ -36,7 +38,8 @@ def safeReadFile( path ): except IOError, e: return '' % (path,e) -def runAllTests( jsontest_executable_path, input_dir = None ): +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False ): if not input_dir: input_dir = os.getcwd() tests = glob( os.path.join( input_dir, '*.json' ) ) @@ -45,12 +48,14 @@ def runAllTests( jsontest_executable_path, 
input_dir = None ): else: test_jsonchecker = [] failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' for input_path in tests + test_jsonchecker: is_json_checker_test = input_path in test_jsonchecker print 'TESTING:', input_path, options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s %s %s" % (jsontest_executable_path, options, - input_path) ) + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) process_output = pipe.read() status = pipe.close() if is_json_checker_test: @@ -101,15 +106,27 @@ def runAllTests( jsontest_executable_path, input_dir = None ): print 'All %d tests passed.' % len(tests) return 0 -if __name__ == '__main__': - if len(sys.argv) < 1 or len(sys.argv) > 2: - print "Usage: %s jsontest-executable-path [input-testcase-directory]" % sys.argv[0] +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + options.error( 'Must provides at least path to jsontestrunner executable.' ) sys.exit( 1 ) - jsontest_executable_path = os.path.normpath( os.path.abspath( sys.argv[1] ) ) - if len(sys.argv) > 2: - input_path = os.path.normpath( os.path.abspath( sys.argv[2] ) ) + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) else: input_path = None - status = runAllTests( jsontest_executable_path, input_path ) - sys.exit( status ) \ No newline at end of file + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind ) + sys.exit( status ) + +if __name__ == '__main__': + main() From 31efc3b2dc7e416b942470ec09f74b2448dfd278 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 21 Nov 2009 18:07:09 +0000 Subject: [PATCH 085/268] - added mini unit testing framework - added unit tests for Value.isXYZ() and iteration over Value members to compute size git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@85 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 21 +- trunk/jsoncpp/include/json/config.h | 3 + trunk/jsoncpp/makefiles/vs71/jsoncpp.sln | 11 + .../makefiles/vs71/test_lib_json.vcproj | 130 ++++ trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 598 ++++++++++++++++++ trunk/jsoncpp/src/test_lib_json/jsontest.h | 252 ++++++++ trunk/jsoncpp/src/test_lib_json/main.cpp | 244 +++++++ trunk/jsoncpp/src/test_lib_json/sconscript | 10 + trunk/jsoncpp/test/runjsontests.py | 2 +- trunk/jsoncpp/test/rununittests.py | 73 +++ 10 files changed, 1342 insertions(+), 2 deletions(-) create mode 100644 trunk/jsoncpp/makefiles/vs71/test_lib_json.vcproj create mode 100644 trunk/jsoncpp/src/test_lib_json/jsontest.cpp create mode 100644 trunk/jsoncpp/src/test_lib_json/jsontest.h create mode 100644 trunk/jsoncpp/src/test_lib_json/main.cpp create mode 100644 trunk/jsoncpp/src/test_lib_json/sconscript create mode 100644 trunk/jsoncpp/test/rununittests.py diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index a22d4c9..af674b4 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -231,6 +231,12 @@ def buildJSONTests( env, target_sources, target_name ): check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( 
jsontests_node, jsontests_node ) ) env.AlwaysBuild( check_alias_target ) +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + def buildLibrary( env, target_sources, target_name ): static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', source=target_sources ) @@ -242,7 +248,7 @@ def buildLibrary( env, target_sources, target_name ): env.Install( lib_dir, shared_lib ) env['SRCDIST_ADD']( source=[target_sources] ) -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests' ) +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) def buildProjectInDirectory( target_directory ): global build_dir @@ -266,6 +272,18 @@ import SCons.Action ActionFactory = SCons.Action.ActionFactory RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + env.Alias( 'check' ) srcdist_cmd = env['SRCDIST_ADD']( source = """ @@ -275,6 +293,7 @@ env.Alias( 'src-dist', srcdist_cmd ) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) buildProjectInDirectory( 'doc' ) #print env.Dump() diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 52de4e4..5d334cb 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -19,6 +19,9 @@ /// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. //# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 # ifdef JSON_IN_CPPTL # include diff --git a/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln b/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln index eb1e81e..5bfa366 100644 --- a/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln +++ b/trunk/jsoncpp/makefiles/vs71/jsoncpp.sln @@ -8,6 +8,11 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} EndProjectSection EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject Global GlobalSection(SolutionConfiguration) = preSolution Debug = Debug @@ -27,6 +32,12 @@ Global {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution EndGlobalSection diff --git a/trunk/jsoncpp/makefiles/vs71/test_lib_json.vcproj b/trunk/jsoncpp/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/trunk/jsoncpp/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..cd219bd --- /dev/null +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -0,0 +1,598 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..7abba56 --- /dev/null +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -0,0 +1,252 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp new file mode 100644 index 0000000..9864178 --- /dev/null +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
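(Illustrative aside, not part of the patch: a short sketch of what the two TODO items above mean in practice. The expected results restate what the TODO and the isBool/isUInt fixtures below describe for this revision; they are assumptions about current behaviour, not the intended final behaviour.)

    #include <json/json.h>

    void illustrateTodoItems()
    {
       // Item 1: a boolean currently reports itself as integral (and numeric),
       // which the TODO flags as undesirable.
       Json::Value booleanValue( true );
       bool reportedIntegral = booleanValue.isIntegral();   // true at this revision

       // Item 2: an unsigned value that fits in the signed int range is still
       // not reported as a valid int, because no range check is performed yet.
       Json::Value smallUnsigned( Json::Value::UInt( 5 ) );
       bool reportedInt = smallUnsigned.isInt();             // false at this revision

       (void) reportedIntegral;
       (void) reportedInt;
    }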
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/trunk/jsoncpp/src/test_lib_json/sconscript b/trunk/jsoncpp/src/test_lib_json/sconscript new file mode 100644 index 0000000..84f56b6 --- /dev/null +++ b/trunk/jsoncpp/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs')
diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py
index eba84b1..0dd87e6 100644
--- a/trunk/jsoncpp/test/runjsontests.py
+++ b/trunk/jsoncpp/test/runjsontests.py
@@ -116,7 +116,7 @@ def main():
     options, args = parser.parse_args()
     if len(args) < 1 or len(args) > 2:
-        options.error( 'Must provides at least path to jsontestrunner executable.' )
+        parser.error( 'Must provides at least path to jsontestrunner executable.' )
         sys.exit( 1 )
 
     jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) )
diff --git a/trunk/jsoncpp/test/rununittests.py b/trunk/jsoncpp/test/rununittests.py
new file mode 100644
index 0000000..ccc54e4
--- /dev/null
+++ b/trunk/jsoncpp/test/rununittests.py
@@ -0,0 +1,73 @@
+import sys
+import os
+import os.path
+import subprocess
+from glob import glob
+import optparse
+
+VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
+
+class TestProxy(object):
+    def __init__( self, test_exe_path, use_valgrind=False ):
+        self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
+        self.use_valgrind = use_valgrind
+
+    def run( self, options ):
+        if self.use_valgrind:
+            cmd = VALGRIND_CMD.split()
+        else:
+            cmd = []
+        cmd.extend( [self.test_exe_path, '--test-auto'] + options )
+        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
+        stdout = process.communicate()[0]
+        if process.returncode:
+            return False, stdout
+        return True, stdout
+
+def runAllTests( exe_path, use_valgrind=False ):
+    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
+    status, test_names = test_proxy.run( ['--list-tests'] )
+    if not status:
+        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
+        return 1
+    test_names = [name.strip() for name in test_names.strip().split('\n')]
+    failures = []
+    for name in test_names:
+        print 'TESTING %s:' % name,
+        succeed, result = test_proxy.run( ['--test', name] )
+        if succeed:
+            print 'OK'
+        else:
+            failures.append( (name, result) )
+            print 'FAILED'
+    failed_count = len(failures)
+    pass_count = len(test_names) - failed_count
+    if failed_count:
+        print
+        for name, result in failures:
+            print result
+        print '%d/%d tests passed (%d failure(s))' % (
+            pass_count, len(test_names), failed_count)
+        return 1
+    else:
+        print 'All %d tests passed' % len(test_names)
+        return 0
+
+def main():
+    from optparse import OptionParser
+    parser = OptionParser( usage="%prog [options] " )
+    parser.add_option("--valgrind",
+        action="store_true", dest="valgrind", default=False,
+        help="run all the tests using valgrind to detect memory leaks")
+    parser.enable_interspersed_args()
+    options, args = parser.parse_args()
+
+    if len(args) != 1:
+        parser.error( 'Must provide the path to the test_lib_json executable.' )
+        sys.exit( 1 )
+
+    exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
+    sys.exit( exit_code )
+
+if __name__ == '__main__':
+    main()

From c5a30811927af746a5a827e3e14ae78140d96f6a Mon Sep 17 00:00:00 2001
From: blep
Date: Sat, 21 Nov 2009 18:20:25 +0000
Subject: [PATCH 086/268] Moved jsontestrunner data from test/ to test/data.
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@86 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 3 ++- trunk/jsoncpp/test/cleantests.py | 2 +- trunk/jsoncpp/test/{ => data}/test_array_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_array_01.json | 0 trunk/jsoncpp/test/{ => data}/test_array_02.expected | 0 trunk/jsoncpp/test/{ => data}/test_array_02.json | 0 trunk/jsoncpp/test/{ => data}/test_array_03.expected | 0 trunk/jsoncpp/test/{ => data}/test_array_03.json | 0 trunk/jsoncpp/test/{ => data}/test_array_04.expected | 0 trunk/jsoncpp/test/{ => data}/test_array_04.json | 0 trunk/jsoncpp/test/{ => data}/test_array_05.expected | 0 trunk/jsoncpp/test/{ => data}/test_array_05.json | 0 trunk/jsoncpp/test/{ => data}/test_array_06.expected | 0 trunk/jsoncpp/test/{ => data}/test_array_06.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_01.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_02.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_02.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_03.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_03.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_04.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_04.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_05.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_05.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_06.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_06.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_07.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_07.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_08.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_08.json | 0 trunk/jsoncpp/test/{ => data}/test_basic_09.expected | 0 trunk/jsoncpp/test/{ => data}/test_basic_09.json | 0 trunk/jsoncpp/test/{ => data}/test_comment_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_comment_01.json | 0 trunk/jsoncpp/test/{ => data}/test_complex_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_complex_01.json | 0 trunk/jsoncpp/test/{ => data}/test_integer_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_integer_01.json | 0 trunk/jsoncpp/test/{ => data}/test_integer_02.expected | 0 trunk/jsoncpp/test/{ => data}/test_integer_02.json | 0 trunk/jsoncpp/test/{ => data}/test_integer_03.expected | 0 trunk/jsoncpp/test/{ => data}/test_integer_03.json | 0 trunk/jsoncpp/test/{ => data}/test_integer_04.expected | 0 trunk/jsoncpp/test/{ => data}/test_integer_04.json | 0 trunk/jsoncpp/test/{ => data}/test_integer_05.expected | 0 trunk/jsoncpp/test/{ => data}/test_integer_05.json | 0 trunk/jsoncpp/test/{ => data}/test_object_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_object_01.json | 0 trunk/jsoncpp/test/{ => data}/test_object_02.expected | 0 trunk/jsoncpp/test/{ => data}/test_object_02.json | 0 trunk/jsoncpp/test/{ => data}/test_object_03.expected | 0 trunk/jsoncpp/test/{ => data}/test_object_03.json | 0 trunk/jsoncpp/test/{ => data}/test_object_04.expected | 0 trunk/jsoncpp/test/{ => data}/test_object_04.json | 0 .../jsoncpp/test/{ => data}/test_preserve_comment_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_preserve_comment_01.json | 0 trunk/jsoncpp/test/{ => data}/test_real_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_real_01.json | 0 trunk/jsoncpp/test/{ => data}/test_real_02.expected | 0 trunk/jsoncpp/test/{ => data}/test_real_02.json | 0 trunk/jsoncpp/test/{ => data}/test_real_03.expected | 0 trunk/jsoncpp/test/{ => data}/test_real_03.json | 0 trunk/jsoncpp/test/{ => 
data}/test_real_04.expected | 0 trunk/jsoncpp/test/{ => data}/test_real_04.json | 0 trunk/jsoncpp/test/{ => data}/test_real_05.expected | 0 trunk/jsoncpp/test/{ => data}/test_real_05.json | 0 trunk/jsoncpp/test/{ => data}/test_real_06.expected | 0 trunk/jsoncpp/test/{ => data}/test_real_06.json | 0 trunk/jsoncpp/test/{ => data}/test_real_07.expected | 0 trunk/jsoncpp/test/{ => data}/test_real_07.json | 0 trunk/jsoncpp/test/{ => data}/test_string_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_string_01.json | 0 trunk/jsoncpp/test/{ => data}/test_string_02.expected | 0 trunk/jsoncpp/test/{ => data}/test_string_02.json | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_01.expected | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_01.json | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_02.expected | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_02.json | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_03.expected | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_03.json | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_04.expected | 0 trunk/jsoncpp/test/{ => data}/test_string_unicode_04.json | 0 trunk/jsoncpp/test/runjsontests.py | 2 +- 83 files changed, 4 insertions(+), 3 deletions(-) rename trunk/jsoncpp/test/{ => data}/test_array_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_array_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_array_02.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_array_02.json (100%) rename trunk/jsoncpp/test/{ => data}/test_array_03.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_array_03.json (100%) rename trunk/jsoncpp/test/{ => data}/test_array_04.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_array_04.json (100%) rename trunk/jsoncpp/test/{ => data}/test_array_05.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_array_05.json (100%) rename trunk/jsoncpp/test/{ => data}/test_array_06.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_array_06.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_02.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_02.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_03.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_03.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_04.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_04.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_05.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_05.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_06.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_06.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_07.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_07.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_08.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_08.json (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_09.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_basic_09.json (100%) rename trunk/jsoncpp/test/{ => data}/test_comment_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_comment_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_complex_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_complex_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_01.expected (100%) rename trunk/jsoncpp/test/{ => 
data}/test_integer_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_02.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_02.json (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_03.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_03.json (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_04.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_04.json (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_05.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_integer_05.json (100%) rename trunk/jsoncpp/test/{ => data}/test_object_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_object_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_object_02.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_object_02.json (100%) rename trunk/jsoncpp/test/{ => data}/test_object_03.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_object_03.json (100%) rename trunk/jsoncpp/test/{ => data}/test_object_04.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_object_04.json (100%) rename trunk/jsoncpp/test/{ => data}/test_preserve_comment_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_preserve_comment_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_real_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_real_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_real_02.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_real_02.json (100%) rename trunk/jsoncpp/test/{ => data}/test_real_03.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_real_03.json (100%) rename trunk/jsoncpp/test/{ => data}/test_real_04.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_real_04.json (100%) rename trunk/jsoncpp/test/{ => data}/test_real_05.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_real_05.json (100%) rename trunk/jsoncpp/test/{ => data}/test_real_06.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_real_06.json (100%) rename trunk/jsoncpp/test/{ => data}/test_real_07.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_real_07.json (100%) rename trunk/jsoncpp/test/{ => data}/test_string_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_string_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_string_02.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_string_02.json (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_01.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_01.json (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_02.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_02.json (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_03.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_03.json (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_04.expected (100%) rename trunk/jsoncpp/test/{ => data}/test_string_unicode_04.json (100%) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index af674b4..00d3741 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -262,8 +262,9 @@ def runJSONTests_action( target, source = None, env = None ): # Add test scripts to python path jsontest_path = Dir( '#test' ).abspath sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), jsontest_path ) + return runjsontests.runAllTests( 
os.path.abspath(source[0].path), data_path ) def runJSONTests_string( target, source = None, env = None ): return 'RunJSONTests("%s")' % source[0] diff --git a/trunk/jsoncpp/test/cleantests.py b/trunk/jsoncpp/test/cleantests.py index e5f99e6..c38fd8f 100644 --- a/trunk/jsoncpp/test/cleantests.py +++ b/trunk/jsoncpp/test/cleantests.py @@ -4,7 +4,7 @@ paths = [] for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( pattern ) + paths += glob.glob( 'data/' + pattern ) for path in paths: os.unlink( path ) diff --git a/trunk/jsoncpp/test/test_array_01.expected b/trunk/jsoncpp/test/data/test_array_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_array_01.expected rename to trunk/jsoncpp/test/data/test_array_01.expected diff --git a/trunk/jsoncpp/test/test_array_01.json b/trunk/jsoncpp/test/data/test_array_01.json similarity index 100% rename from trunk/jsoncpp/test/test_array_01.json rename to trunk/jsoncpp/test/data/test_array_01.json diff --git a/trunk/jsoncpp/test/test_array_02.expected b/trunk/jsoncpp/test/data/test_array_02.expected similarity index 100% rename from trunk/jsoncpp/test/test_array_02.expected rename to trunk/jsoncpp/test/data/test_array_02.expected diff --git a/trunk/jsoncpp/test/test_array_02.json b/trunk/jsoncpp/test/data/test_array_02.json similarity index 100% rename from trunk/jsoncpp/test/test_array_02.json rename to trunk/jsoncpp/test/data/test_array_02.json diff --git a/trunk/jsoncpp/test/test_array_03.expected b/trunk/jsoncpp/test/data/test_array_03.expected similarity index 100% rename from trunk/jsoncpp/test/test_array_03.expected rename to trunk/jsoncpp/test/data/test_array_03.expected diff --git a/trunk/jsoncpp/test/test_array_03.json b/trunk/jsoncpp/test/data/test_array_03.json similarity index 100% rename from trunk/jsoncpp/test/test_array_03.json rename to trunk/jsoncpp/test/data/test_array_03.json diff --git a/trunk/jsoncpp/test/test_array_04.expected b/trunk/jsoncpp/test/data/test_array_04.expected similarity index 100% rename from trunk/jsoncpp/test/test_array_04.expected rename to trunk/jsoncpp/test/data/test_array_04.expected diff --git a/trunk/jsoncpp/test/test_array_04.json b/trunk/jsoncpp/test/data/test_array_04.json similarity index 100% rename from trunk/jsoncpp/test/test_array_04.json rename to trunk/jsoncpp/test/data/test_array_04.json diff --git a/trunk/jsoncpp/test/test_array_05.expected b/trunk/jsoncpp/test/data/test_array_05.expected similarity index 100% rename from trunk/jsoncpp/test/test_array_05.expected rename to trunk/jsoncpp/test/data/test_array_05.expected diff --git a/trunk/jsoncpp/test/test_array_05.json b/trunk/jsoncpp/test/data/test_array_05.json similarity index 100% rename from trunk/jsoncpp/test/test_array_05.json rename to trunk/jsoncpp/test/data/test_array_05.json diff --git a/trunk/jsoncpp/test/test_array_06.expected b/trunk/jsoncpp/test/data/test_array_06.expected similarity index 100% rename from trunk/jsoncpp/test/test_array_06.expected rename to trunk/jsoncpp/test/data/test_array_06.expected diff --git a/trunk/jsoncpp/test/test_array_06.json b/trunk/jsoncpp/test/data/test_array_06.json similarity index 100% rename from trunk/jsoncpp/test/test_array_06.json rename to trunk/jsoncpp/test/data/test_array_06.json diff --git a/trunk/jsoncpp/test/test_basic_01.expected b/trunk/jsoncpp/test/data/test_basic_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_01.expected rename to trunk/jsoncpp/test/data/test_basic_01.expected diff --git 
a/trunk/jsoncpp/test/test_basic_01.json b/trunk/jsoncpp/test/data/test_basic_01.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_01.json rename to trunk/jsoncpp/test/data/test_basic_01.json diff --git a/trunk/jsoncpp/test/test_basic_02.expected b/trunk/jsoncpp/test/data/test_basic_02.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_02.expected rename to trunk/jsoncpp/test/data/test_basic_02.expected diff --git a/trunk/jsoncpp/test/test_basic_02.json b/trunk/jsoncpp/test/data/test_basic_02.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_02.json rename to trunk/jsoncpp/test/data/test_basic_02.json diff --git a/trunk/jsoncpp/test/test_basic_03.expected b/trunk/jsoncpp/test/data/test_basic_03.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_03.expected rename to trunk/jsoncpp/test/data/test_basic_03.expected diff --git a/trunk/jsoncpp/test/test_basic_03.json b/trunk/jsoncpp/test/data/test_basic_03.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_03.json rename to trunk/jsoncpp/test/data/test_basic_03.json diff --git a/trunk/jsoncpp/test/test_basic_04.expected b/trunk/jsoncpp/test/data/test_basic_04.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_04.expected rename to trunk/jsoncpp/test/data/test_basic_04.expected diff --git a/trunk/jsoncpp/test/test_basic_04.json b/trunk/jsoncpp/test/data/test_basic_04.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_04.json rename to trunk/jsoncpp/test/data/test_basic_04.json diff --git a/trunk/jsoncpp/test/test_basic_05.expected b/trunk/jsoncpp/test/data/test_basic_05.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_05.expected rename to trunk/jsoncpp/test/data/test_basic_05.expected diff --git a/trunk/jsoncpp/test/test_basic_05.json b/trunk/jsoncpp/test/data/test_basic_05.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_05.json rename to trunk/jsoncpp/test/data/test_basic_05.json diff --git a/trunk/jsoncpp/test/test_basic_06.expected b/trunk/jsoncpp/test/data/test_basic_06.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_06.expected rename to trunk/jsoncpp/test/data/test_basic_06.expected diff --git a/trunk/jsoncpp/test/test_basic_06.json b/trunk/jsoncpp/test/data/test_basic_06.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_06.json rename to trunk/jsoncpp/test/data/test_basic_06.json diff --git a/trunk/jsoncpp/test/test_basic_07.expected b/trunk/jsoncpp/test/data/test_basic_07.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_07.expected rename to trunk/jsoncpp/test/data/test_basic_07.expected diff --git a/trunk/jsoncpp/test/test_basic_07.json b/trunk/jsoncpp/test/data/test_basic_07.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_07.json rename to trunk/jsoncpp/test/data/test_basic_07.json diff --git a/trunk/jsoncpp/test/test_basic_08.expected b/trunk/jsoncpp/test/data/test_basic_08.expected similarity index 100% rename from trunk/jsoncpp/test/test_basic_08.expected rename to trunk/jsoncpp/test/data/test_basic_08.expected diff --git a/trunk/jsoncpp/test/test_basic_08.json b/trunk/jsoncpp/test/data/test_basic_08.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_08.json rename to trunk/jsoncpp/test/data/test_basic_08.json diff --git a/trunk/jsoncpp/test/test_basic_09.expected b/trunk/jsoncpp/test/data/test_basic_09.expected similarity index 100% rename from 
trunk/jsoncpp/test/test_basic_09.expected rename to trunk/jsoncpp/test/data/test_basic_09.expected diff --git a/trunk/jsoncpp/test/test_basic_09.json b/trunk/jsoncpp/test/data/test_basic_09.json similarity index 100% rename from trunk/jsoncpp/test/test_basic_09.json rename to trunk/jsoncpp/test/data/test_basic_09.json diff --git a/trunk/jsoncpp/test/test_comment_01.expected b/trunk/jsoncpp/test/data/test_comment_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_comment_01.expected rename to trunk/jsoncpp/test/data/test_comment_01.expected diff --git a/trunk/jsoncpp/test/test_comment_01.json b/trunk/jsoncpp/test/data/test_comment_01.json similarity index 100% rename from trunk/jsoncpp/test/test_comment_01.json rename to trunk/jsoncpp/test/data/test_comment_01.json diff --git a/trunk/jsoncpp/test/test_complex_01.expected b/trunk/jsoncpp/test/data/test_complex_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_complex_01.expected rename to trunk/jsoncpp/test/data/test_complex_01.expected diff --git a/trunk/jsoncpp/test/test_complex_01.json b/trunk/jsoncpp/test/data/test_complex_01.json similarity index 100% rename from trunk/jsoncpp/test/test_complex_01.json rename to trunk/jsoncpp/test/data/test_complex_01.json diff --git a/trunk/jsoncpp/test/test_integer_01.expected b/trunk/jsoncpp/test/data/test_integer_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_integer_01.expected rename to trunk/jsoncpp/test/data/test_integer_01.expected diff --git a/trunk/jsoncpp/test/test_integer_01.json b/trunk/jsoncpp/test/data/test_integer_01.json similarity index 100% rename from trunk/jsoncpp/test/test_integer_01.json rename to trunk/jsoncpp/test/data/test_integer_01.json diff --git a/trunk/jsoncpp/test/test_integer_02.expected b/trunk/jsoncpp/test/data/test_integer_02.expected similarity index 100% rename from trunk/jsoncpp/test/test_integer_02.expected rename to trunk/jsoncpp/test/data/test_integer_02.expected diff --git a/trunk/jsoncpp/test/test_integer_02.json b/trunk/jsoncpp/test/data/test_integer_02.json similarity index 100% rename from trunk/jsoncpp/test/test_integer_02.json rename to trunk/jsoncpp/test/data/test_integer_02.json diff --git a/trunk/jsoncpp/test/test_integer_03.expected b/trunk/jsoncpp/test/data/test_integer_03.expected similarity index 100% rename from trunk/jsoncpp/test/test_integer_03.expected rename to trunk/jsoncpp/test/data/test_integer_03.expected diff --git a/trunk/jsoncpp/test/test_integer_03.json b/trunk/jsoncpp/test/data/test_integer_03.json similarity index 100% rename from trunk/jsoncpp/test/test_integer_03.json rename to trunk/jsoncpp/test/data/test_integer_03.json diff --git a/trunk/jsoncpp/test/test_integer_04.expected b/trunk/jsoncpp/test/data/test_integer_04.expected similarity index 100% rename from trunk/jsoncpp/test/test_integer_04.expected rename to trunk/jsoncpp/test/data/test_integer_04.expected diff --git a/trunk/jsoncpp/test/test_integer_04.json b/trunk/jsoncpp/test/data/test_integer_04.json similarity index 100% rename from trunk/jsoncpp/test/test_integer_04.json rename to trunk/jsoncpp/test/data/test_integer_04.json diff --git a/trunk/jsoncpp/test/test_integer_05.expected b/trunk/jsoncpp/test/data/test_integer_05.expected similarity index 100% rename from trunk/jsoncpp/test/test_integer_05.expected rename to trunk/jsoncpp/test/data/test_integer_05.expected diff --git a/trunk/jsoncpp/test/test_integer_05.json b/trunk/jsoncpp/test/data/test_integer_05.json similarity index 100% rename from 
trunk/jsoncpp/test/test_integer_05.json rename to trunk/jsoncpp/test/data/test_integer_05.json diff --git a/trunk/jsoncpp/test/test_object_01.expected b/trunk/jsoncpp/test/data/test_object_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_object_01.expected rename to trunk/jsoncpp/test/data/test_object_01.expected diff --git a/trunk/jsoncpp/test/test_object_01.json b/trunk/jsoncpp/test/data/test_object_01.json similarity index 100% rename from trunk/jsoncpp/test/test_object_01.json rename to trunk/jsoncpp/test/data/test_object_01.json diff --git a/trunk/jsoncpp/test/test_object_02.expected b/trunk/jsoncpp/test/data/test_object_02.expected similarity index 100% rename from trunk/jsoncpp/test/test_object_02.expected rename to trunk/jsoncpp/test/data/test_object_02.expected diff --git a/trunk/jsoncpp/test/test_object_02.json b/trunk/jsoncpp/test/data/test_object_02.json similarity index 100% rename from trunk/jsoncpp/test/test_object_02.json rename to trunk/jsoncpp/test/data/test_object_02.json diff --git a/trunk/jsoncpp/test/test_object_03.expected b/trunk/jsoncpp/test/data/test_object_03.expected similarity index 100% rename from trunk/jsoncpp/test/test_object_03.expected rename to trunk/jsoncpp/test/data/test_object_03.expected diff --git a/trunk/jsoncpp/test/test_object_03.json b/trunk/jsoncpp/test/data/test_object_03.json similarity index 100% rename from trunk/jsoncpp/test/test_object_03.json rename to trunk/jsoncpp/test/data/test_object_03.json diff --git a/trunk/jsoncpp/test/test_object_04.expected b/trunk/jsoncpp/test/data/test_object_04.expected similarity index 100% rename from trunk/jsoncpp/test/test_object_04.expected rename to trunk/jsoncpp/test/data/test_object_04.expected diff --git a/trunk/jsoncpp/test/test_object_04.json b/trunk/jsoncpp/test/data/test_object_04.json similarity index 100% rename from trunk/jsoncpp/test/test_object_04.json rename to trunk/jsoncpp/test/data/test_object_04.json diff --git a/trunk/jsoncpp/test/test_preserve_comment_01.expected b/trunk/jsoncpp/test/data/test_preserve_comment_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_preserve_comment_01.expected rename to trunk/jsoncpp/test/data/test_preserve_comment_01.expected diff --git a/trunk/jsoncpp/test/test_preserve_comment_01.json b/trunk/jsoncpp/test/data/test_preserve_comment_01.json similarity index 100% rename from trunk/jsoncpp/test/test_preserve_comment_01.json rename to trunk/jsoncpp/test/data/test_preserve_comment_01.json diff --git a/trunk/jsoncpp/test/test_real_01.expected b/trunk/jsoncpp/test/data/test_real_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_real_01.expected rename to trunk/jsoncpp/test/data/test_real_01.expected diff --git a/trunk/jsoncpp/test/test_real_01.json b/trunk/jsoncpp/test/data/test_real_01.json similarity index 100% rename from trunk/jsoncpp/test/test_real_01.json rename to trunk/jsoncpp/test/data/test_real_01.json diff --git a/trunk/jsoncpp/test/test_real_02.expected b/trunk/jsoncpp/test/data/test_real_02.expected similarity index 100% rename from trunk/jsoncpp/test/test_real_02.expected rename to trunk/jsoncpp/test/data/test_real_02.expected diff --git a/trunk/jsoncpp/test/test_real_02.json b/trunk/jsoncpp/test/data/test_real_02.json similarity index 100% rename from trunk/jsoncpp/test/test_real_02.json rename to trunk/jsoncpp/test/data/test_real_02.json diff --git a/trunk/jsoncpp/test/test_real_03.expected b/trunk/jsoncpp/test/data/test_real_03.expected similarity index 100% rename from 
trunk/jsoncpp/test/test_real_03.expected rename to trunk/jsoncpp/test/data/test_real_03.expected diff --git a/trunk/jsoncpp/test/test_real_03.json b/trunk/jsoncpp/test/data/test_real_03.json similarity index 100% rename from trunk/jsoncpp/test/test_real_03.json rename to trunk/jsoncpp/test/data/test_real_03.json diff --git a/trunk/jsoncpp/test/test_real_04.expected b/trunk/jsoncpp/test/data/test_real_04.expected similarity index 100% rename from trunk/jsoncpp/test/test_real_04.expected rename to trunk/jsoncpp/test/data/test_real_04.expected diff --git a/trunk/jsoncpp/test/test_real_04.json b/trunk/jsoncpp/test/data/test_real_04.json similarity index 100% rename from trunk/jsoncpp/test/test_real_04.json rename to trunk/jsoncpp/test/data/test_real_04.json diff --git a/trunk/jsoncpp/test/test_real_05.expected b/trunk/jsoncpp/test/data/test_real_05.expected similarity index 100% rename from trunk/jsoncpp/test/test_real_05.expected rename to trunk/jsoncpp/test/data/test_real_05.expected diff --git a/trunk/jsoncpp/test/test_real_05.json b/trunk/jsoncpp/test/data/test_real_05.json similarity index 100% rename from trunk/jsoncpp/test/test_real_05.json rename to trunk/jsoncpp/test/data/test_real_05.json diff --git a/trunk/jsoncpp/test/test_real_06.expected b/trunk/jsoncpp/test/data/test_real_06.expected similarity index 100% rename from trunk/jsoncpp/test/test_real_06.expected rename to trunk/jsoncpp/test/data/test_real_06.expected diff --git a/trunk/jsoncpp/test/test_real_06.json b/trunk/jsoncpp/test/data/test_real_06.json similarity index 100% rename from trunk/jsoncpp/test/test_real_06.json rename to trunk/jsoncpp/test/data/test_real_06.json diff --git a/trunk/jsoncpp/test/test_real_07.expected b/trunk/jsoncpp/test/data/test_real_07.expected similarity index 100% rename from trunk/jsoncpp/test/test_real_07.expected rename to trunk/jsoncpp/test/data/test_real_07.expected diff --git a/trunk/jsoncpp/test/test_real_07.json b/trunk/jsoncpp/test/data/test_real_07.json similarity index 100% rename from trunk/jsoncpp/test/test_real_07.json rename to trunk/jsoncpp/test/data/test_real_07.json diff --git a/trunk/jsoncpp/test/test_string_01.expected b/trunk/jsoncpp/test/data/test_string_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_string_01.expected rename to trunk/jsoncpp/test/data/test_string_01.expected diff --git a/trunk/jsoncpp/test/test_string_01.json b/trunk/jsoncpp/test/data/test_string_01.json similarity index 100% rename from trunk/jsoncpp/test/test_string_01.json rename to trunk/jsoncpp/test/data/test_string_01.json diff --git a/trunk/jsoncpp/test/test_string_02.expected b/trunk/jsoncpp/test/data/test_string_02.expected similarity index 100% rename from trunk/jsoncpp/test/test_string_02.expected rename to trunk/jsoncpp/test/data/test_string_02.expected diff --git a/trunk/jsoncpp/test/test_string_02.json b/trunk/jsoncpp/test/data/test_string_02.json similarity index 100% rename from trunk/jsoncpp/test/test_string_02.json rename to trunk/jsoncpp/test/data/test_string_02.json diff --git a/trunk/jsoncpp/test/test_string_unicode_01.expected b/trunk/jsoncpp/test/data/test_string_unicode_01.expected similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_01.expected rename to trunk/jsoncpp/test/data/test_string_unicode_01.expected diff --git a/trunk/jsoncpp/test/test_string_unicode_01.json b/trunk/jsoncpp/test/data/test_string_unicode_01.json similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_01.json rename to 
trunk/jsoncpp/test/data/test_string_unicode_01.json diff --git a/trunk/jsoncpp/test/test_string_unicode_02.expected b/trunk/jsoncpp/test/data/test_string_unicode_02.expected similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_02.expected rename to trunk/jsoncpp/test/data/test_string_unicode_02.expected diff --git a/trunk/jsoncpp/test/test_string_unicode_02.json b/trunk/jsoncpp/test/data/test_string_unicode_02.json similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_02.json rename to trunk/jsoncpp/test/data/test_string_unicode_02.json diff --git a/trunk/jsoncpp/test/test_string_unicode_03.expected b/trunk/jsoncpp/test/data/test_string_unicode_03.expected similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_03.expected rename to trunk/jsoncpp/test/data/test_string_unicode_03.expected diff --git a/trunk/jsoncpp/test/test_string_unicode_03.json b/trunk/jsoncpp/test/data/test_string_unicode_03.json similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_03.json rename to trunk/jsoncpp/test/data/test_string_unicode_03.json diff --git a/trunk/jsoncpp/test/test_string_unicode_04.expected b/trunk/jsoncpp/test/data/test_string_unicode_04.expected similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_04.expected rename to trunk/jsoncpp/test/data/test_string_unicode_04.expected diff --git a/trunk/jsoncpp/test/test_string_unicode_04.json b/trunk/jsoncpp/test/data/test_string_unicode_04.json similarity index 100% rename from trunk/jsoncpp/test/test_string_unicode_04.json rename to trunk/jsoncpp/test/data/test_string_unicode_04.json diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index 0dd87e6..e05bd52 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -41,7 +41,7 @@ def safeReadFile( path ): def runAllTests( jsontest_executable_path, input_dir = None, use_valgrind=False ): if not input_dir: - input_dir = os.getcwd() + input_dir = os.path.join( os.getcwd(), 'data' ) tests = glob( os.path.join( input_dir, '*.json' ) ) if RUN_JSONCHECKER: test_jsonchecker = glob( os.path.join( input_dir, 'jsonchecker', '*.json' ) ) From 65f08f95fb620b1012d0497d92c490b8d9c4050b Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 22 Nov 2009 13:11:14 +0000 Subject: [PATCH 087/268] Added instruction to run unit tests manually. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@87 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 5651d37..c1b2c73 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -44,4 +44,10 @@ and TARGET may be: To run the test manually: cd test +# This will run the Reader/Writer tests python runjsontests.py "path to jsontest.exe" +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind using: +python rununittests.py --valgrind "path to test_lib_json.exe" From ed21ceea704918fc7ff85be51c452346dc400ba6 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 23 Nov 2009 22:33:30 +0000 Subject: [PATCH 088/268] Fixed iteration bug over null values. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@88 1f120ed1-78a5-a849-adca-83f0a9e25bb6
---
 trunk/jsoncpp/include/json/value.h            |  2 +
 .../src/lib_json/json_valueiterator.inl       | 15 +++++
 trunk/jsoncpp/src/test_lib_json/jsontest.cpp  | 58 +++++++++----------
 3 files changed, 46 insertions(+), 29 deletions(-)

diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h
index 3884b08..72fcabd 100644
--- a/trunk/jsoncpp/include/json/value.h
+++ b/trunk/jsoncpp/include/json/value.h
@@ -917,6 +917,8 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator
 private:
 #ifndef JSON_VALUE_USE_INTERNAL_MAP
    Value::ObjectValues::iterator current_;
+   // Indicates that the iterator is for a null value.
+   bool isNull_;
 #else
    union
    {
diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl
index fdc52f6..be88c28 100644
--- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl
+++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl
@@ -13,6 +13,7 @@ ValueIteratorBase::ValueIteratorBase()
 #ifndef JSON_VALUE_USE_INTERNAL_MAP
    : current_()
+   , isNull_( true )
 #else
 # error fix me // Need to handle uninitialized iterator comparison for experimental maps
 #endif
@@ -23,6 +24,7 @@ ValueIteratorBase::ValueIteratorBase()
 #ifndef JSON_VALUE_USE_INTERNAL_MAP
 ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator &current )
    : current_( current )
+   , isNull_( false )
 {
 }
 #else
@@ -86,6 +88,15 @@ ValueIteratorBase::computeDistance( const SelfType &other ) const
 # ifdef JSON_USE_CPPTL_SMALLMAP
    return current_ - other.current_;
 # else
+   // Iterators for null values are initialized using the default
+   // constructor, which initializes current_ to the default
+   // std::map::iterator. As begin() and end() are two instances
+   // of the default std::map::iterator, they cannot be compared.
+   // To allow this, we handle this comparison specifically.
+   if ( isNull_ && other.isNull_ )
+   {
+      return 0;
+   }
    return difference_type( std::distance( current_, other.current_ ) );
 # endif
 #else
@@ -100,6 +111,10 @@ bool
 ValueIteratorBase::isEqual( const SelfType &other ) const
 {
 #ifndef JSON_VALUE_USE_INTERNAL_MAP
+   if ( isNull_ )
+   {
+      return other.isNull_;
+   }
    return current_ == other.current_;
 #else
    if ( isArray_ )
diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp
index cd219bd..dfd2b5c 100644
--- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp
+++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp
@@ -505,35 +505,35 @@ Runner::runCommandLine( int argc, const char *argv[] ) const
 #if defined(_MSC_VER)
 // Hook MSVCRT assertions to prevent dialog from appearing
-static int
-msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
-{
-   // The default CRT handling of error and assertion is to display
-   // an error dialog to the user.
-   // Instead, when an error or an assertion occurs, we force the
-   // application to terminate using abort() after display
-   // the message on stderr.
- if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} #endif // if defined(_MSC_VER) From 93cd6e151441ac36e27ba08bcd4785147171d256 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 24 Nov 2009 17:43:58 +0000 Subject: [PATCH 089/268] Fixed compilation with Sun Studio 12 (avoid usage of std::distance) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@89 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_valueiterator.inl | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index be88c28..d6cc031 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -97,7 +97,18 @@ ValueIteratorBase::computeDistance( const SelfType &other ) const { return 0; } - return difference_type( std::distance( current_, other.current_ ) ); + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). 
+ // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; # endif #else if ( isArray_ ) From 1efe80c6270c0bf61aa355a37f552812d4857c2f Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 26 Nov 2009 10:52:18 +0000 Subject: [PATCH 090/268] Added another failing unicode test case git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@90 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/data/test_string_unicode_05.expected | 1 + trunk/jsoncpp/test/data/test_string_unicode_05.json | 1 + 2 files changed, 2 insertions(+) create mode 100644 trunk/jsoncpp/test/data/test_string_unicode_05.expected create mode 100644 trunk/jsoncpp/test/data/test_string_unicode_05.json diff --git a/trunk/jsoncpp/test/data/test_string_unicode_05.expected b/trunk/jsoncpp/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..e785065 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_unicode_05.expected @@ -0,0 +1 @@ +.="za\u017c\u00f3\u0142\u0107 g\u0119\u015bl\u0105 ja\u017a\u0144" \ No newline at end of file diff --git a/trunk/jsoncpp/test/data/test_string_unicode_05.json b/trunk/jsoncpp/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file From 9edcde89eced0233449512555d34c69d0bf37155 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 15 Jan 2010 14:56:59 +0000 Subject: [PATCH 091/268] Integrated part of Patch #2839016, fixing default iterator initialization when using internal map. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@91 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 13 +++++++++++++ trunk/jsoncpp/src/lib_json/json_valueiterator.inl | 8 ++++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 72fcabd..ce3d3cd 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -637,6 +637,13 @@ namespace Json { # ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION struct IteratorState { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } ValueInternalMap *map_; ValueInternalLink *link_; BucketIndex itemIndex_; @@ -729,6 +736,12 @@ namespace Json { # ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION struct IteratorState // Must be a POD { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } ValueInternalArray *array_; Value **currentPageIndex_; unsigned int currentItemIndex_; diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index d6cc031..898c358 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -14,11 +14,15 @@ ValueIteratorBase::ValueIteratorBase() #ifndef JSON_VALUE_USE_INTERNAL_MAP : current_() , isNull_( true ) +{ +} #else -# error fix me // Need to handle uninitialized iterator comparison for experimental maps -#endif + : isArray_( true ) + , isNull_( true ) { + iterator_.array_ = ValueInternalArray::IteratorState(); } +#endif #ifndef JSON_VALUE_USE_INTERNAL_MAP From c8ac7b49fa94d0cf477cf190b05c93ac6599e47c Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 19 Feb 2010 05:09:54 +0000 Subject: [PATCH 092/268] - fixed failing test git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@92 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/data/test_string_unicode_05.expected | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/test/data/test_string_unicode_05.expected b/trunk/jsoncpp/test/data/test_string_unicode_05.expected index e785065..19b2c40 100644 --- a/trunk/jsoncpp/test/data/test_string_unicode_05.expected +++ b/trunk/jsoncpp/test/data/test_string_unicode_05.expected @@ -1 +1,2 @@ -.="za\u017c\u00f3\u0142\u0107 g\u0119\u015bl\u0105 ja\u017a\u0144" \ No newline at end of file +.="Zażółć gęślą jaźń" + From f95413cfb91e07aeadd631391405384b5a94645b Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 19 Feb 2010 05:10:41 +0000 Subject: [PATCH 093/268] renamed jsontestrunner.py to pyjsontestrunner.py to avoid confusion git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@93 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/{jsontestrunner.py => pyjsontestrunner.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename trunk/jsoncpp/test/{jsontestrunner.py => pyjsontestrunner.py} (100%) diff --git a/trunk/jsoncpp/test/jsontestrunner.py b/trunk/jsoncpp/test/pyjsontestrunner.py similarity index 100% rename from trunk/jsoncpp/test/jsontestrunner.py rename to trunk/jsoncpp/test/pyjsontestrunner.py From a935bab928da80771af6bc8e436930ff3fc59ede Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 21 Feb 2010 10:44:12 +0000 Subject: [PATCH 094/268] - added a large test that cause(d?) 
crash with experimental JSON_VALUE_USE_INTERNAL_MAP git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@94 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- .../jsoncpp/test/data/test_large_01.expected | 2122 +++++++++++++++++ trunk/jsoncpp/test/data/test_large_01.json | 2 + 2 files changed, 2124 insertions(+) create mode 100644 trunk/jsoncpp/test/data/test_large_01.expected create mode 100644 trunk/jsoncpp/test/data/test_large_01.json diff --git a/trunk/jsoncpp/test/data/test_large_01.expected b/trunk/jsoncpp/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/trunk/jsoncpp/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 
+.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 
+.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 
+.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 
+.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 
+.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 
+.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 
+.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 
+.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 
+.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/trunk/jsoncpp/test/data/test_large_01.json b/trunk/jsoncpp/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/trunk/jsoncpp/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file From dca1f256ab471157bee285e8bac62081b8ba7a60 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 21 Feb 2010 14:08:17 +0000 Subject: [PATCH 095/268] - added Int/UInt typedef in Json namespace. Modified Value::Int and Value::UInt to be typedef on those. Modified code to use Json::Int instead of Value::Int. - added Value constructor taking begin/end pointer to initialize the Value with a non-zero terminated string. 
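A minimal usage sketch of the two additions described above (illustrative only, not part of the patch; it assumes the public headers under include/json and linking against lib_json):

// Illustrative sketch only -- not taken from the patch.
#include <json/value.h>
#include <cassert>

int main()
{
   const char buffer[] = "hello world";

   // New constructor: copies exactly the characters in [begin, end),
   // so the input does not need to be zero-terminated at 'end'.
   Json::Value word( buffer, buffer + 5 );
   assert( word.asString() == "hello" );

   // Int/UInt are now namespace-level typedefs; Value::Int/UInt alias them.
   Json::UInt index = 0;
   Json::Value array;
   array[index] = word;
   assert( array[index].asString() == "hello" );
   return 0;
}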
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@95 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/forwards.h | 7 ++++++- trunk/jsoncpp/include/json/value.h | 11 +++++----- trunk/jsoncpp/include/json/writer.h | 4 ++-- trunk/jsoncpp/src/lib_json/json_value.cpp | 21 ++++++++++++++++--- .../src/lib_json/json_valueiterator.inl | 2 +- trunk/jsoncpp/src/lib_json/json_writer.cpp | 6 +++--- 6 files changed, 36 insertions(+), 15 deletions(-) diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index ee76071..d0ce830 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -5,14 +5,19 @@ namespace Json { + // writer.h class FastWriter; - class Reader; class StyledWriter; + // reader.h + class Reader; + // features.h class Features; // value.h + typedef int Int; + typedef unsigned int UInt; class StaticString; class Path; class PathArgument; diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index ce3d3cd..d575b70 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -117,10 +117,10 @@ namespace Json { # endif public: typedef std::vector Members; - typedef int Int; - typedef unsigned int UInt; typedef ValueIterator iterator; typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; typedef UInt ArrayIndex; static const Value null; @@ -186,6 +186,7 @@ namespace Json { Value( UInt value ); Value( double value ); Value( const char *value ); + Value( const char *beginValue, const char *endValue ); /** \brief Constructs a value from a static string. * Like other value string constructor but do not duplicate the string for @@ -453,7 +454,7 @@ namespace Json { friend class Path; PathArgument(); - PathArgument( Value::UInt index ); + PathArgument( UInt index ); PathArgument( const char *key ); PathArgument( const std::string &key ); @@ -465,7 +466,7 @@ namespace Json { kindKey }; std::string key_; - Value::UInt index_; + UInt index_; Kind kind_; }; @@ -909,7 +910,7 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator Value key() const; /// Return the index of the referenced Value. -1 if it is not an arrayValue. - Value::UInt index() const; + UInt index() const; /// Return the member name of the referenced Value. "" if it is not an objectValue. 
const char *memberName() const; diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index cfa92c6..5f4b83b 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -157,8 +157,8 @@ namespace Json { bool addChildValues_; }; - std::string JSON_API valueToString( Value::Int value ); - std::string JSON_API valueToString( Value::UInt value ); + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); std::string JSON_API valueToString( double value ); std::string JSON_API valueToString( bool value ); std::string JSON_API valueToQuotedString( const char *value ); diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 734bf16..573205f 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -20,9 +20,9 @@ namespace Json { const Value Value::null; -const Value::Int Value::minInt = Value::Int( ~(Value::UInt(-1)/2) ); -const Value::Int Value::maxInt = Value::Int( Value::UInt(-1)/2 ); -const Value::UInt Value::maxUInt = Value::UInt(-1); +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); // A "safe" implementation of strdup. Allow null pointer to be passed. // Also avoid warning on msvc80. @@ -351,6 +351,21 @@ Value::Value( const char *value ) value_.string_ = valueAllocator()->duplicateStringValue( value ); } + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + Value::Value( const std::string &value ) : type_( stringValue ) , allocated_( true ) diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index 898c358..736e260 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -176,7 +176,7 @@ ValueIteratorBase::key() const } -Value::UInt +UInt ValueIteratorBase::index() const { #ifndef JSON_VALUE_USE_INTERNAL_MAP diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index f2f8311..cdf4188 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -39,14 +39,14 @@ static void uintToString( unsigned int value, while ( value != 0 ); } -std::string valueToString( Value::Int value ) +std::string valueToString( Int value ) { char buffer[32]; char *current = buffer + sizeof(buffer); bool isNegative = value < 0; if ( isNegative ) value = -value; - uintToString( Value::UInt(value), current ); + uintToString( UInt(value), current ); if ( isNegative ) *--current = '-'; assert( current >= buffer ); @@ -54,7 +54,7 @@ std::string valueToString( Value::Int value ) } -std::string valueToString( Value::UInt value ) +std::string valueToString( UInt value ) { char buffer[32]; char *current = buffer + sizeof(buffer); From 64d7e231b25c6f212894c93c157889b465dbd99a Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 21 Feb 2010 14:24:52 +0000 Subject: [PATCH 096/268] - updated roadmap (utf-8 handling in Reader) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@96 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/roadmap.dox | 13 +++++-------- 1 file 
changed, 5 insertions(+), 8 deletions(-) diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox index 84648b9..7f3aa1a 100644 --- a/trunk/jsoncpp/doc/roadmap.dox +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -2,11 +2,9 @@ \section ms_release Makes JsonCpp ready for release - Build system clean-up: - Fix build on Windows (shared-library build is broken) - - Fix doxygen build issue (discard doxygen dependency check, always rebuild) - Add enable/disable flag for static and shared library build - Enhance help - - Test with recent Scons checkpoint - - Platform portability check: + - Platform portability check: (Notes: was ok on last check) - linux/gcc, - solaris/cc, - windows/msvc678, @@ -14,17 +12,16 @@ - Add JsonCpp version to header as numeric for use in preprocessor test - Remove buggy experimental hash stuff - Release on sourceforge download - \section ms_unicode Clean-up unicode handling - - Ensure reader properly convert \u and \U unicode sequence to UTF8 - - Ensure writer emit only UTF8 string. - - Provides hook to convert string to/from utf8/other encoding. - - look into iconv, icu and windows API \section ms_strict Adds a strict mode to reader/parser Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - Enforce only object or array as root element - Disable comment support + - Get jsonchecker failing tests to pass in strict mode \section ms_separation Expose json reader/writer API that do not impose using Json::Value. Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - "Stream" based parser to serialized a Json document without using Json::Value as input. - Performance oriented parser/writer: - Provides an event based parser. Should allow pulling & skipping events for ease of use. - Provides a JSON document builder: fast only. From a9514d51dff0a5c921a4c39bde72902b0658d531 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 21 Feb 2010 14:26:08 +0000 Subject: [PATCH 097/268] - added command-line option --with-json-checker to run test from jsonchecker test suite of json.org. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@97 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/runjsontests.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index e05bd52..800337d 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -4,7 +4,6 @@ from glob import glob import optparse -RUN_JSONCHECKER = False VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' def compareOutputs( expected, actual, message ): @@ -39,12 +38,12 @@ def safeReadFile( path ): return '' % (path,e) def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False ): + use_valgrind=False, with_json_checker=False ): if not input_dir: input_dir = os.path.join( os.getcwd(), 'data' ) tests = glob( os.path.join( input_dir, '*.json' ) ) - if RUN_JSONCHECKER: - test_jsonchecker = glob( os.path.join( input_dir, 'jsonchecker', '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) else: test_jsonchecker = [] failed_tests = [] @@ -112,6 +111,9 @@ def main(): parser.add_option("--valgrind", action="store_true", dest="valgrind", default=False, help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") parser.enable_interspersed_args() options, args = parser.parse_args() @@ -125,7 +127,7 @@ def main(): else: input_path = None status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind ) + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) sys.exit( status ) if __name__ == '__main__': From c2aa72498fa21bf26aa92ae65aebed46a3e292e2 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 21 Feb 2010 14:28:54 +0000 Subject: [PATCH 098/268] - added missing virtual destructor to TestCase. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@98 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 5 +++++ trunk/jsoncpp/src/test_lib_json/jsontest.h | 2 ++ 2 files changed, 7 insertions(+) diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp index dfd2b5c..a07d0fe 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -304,6 +304,11 @@ TestCase::TestCase() } +TestCase::~TestCase() +{ +} + + void TestCase::run( TestResult &result ) { diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 7abba56..325b1d2 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -110,6 +110,8 @@ namespace JsonTest { public: TestCase(); + virtual ~TestCase(); + void run( TestResult &result ); virtual const char *testName() const = 0; From d82593cd404bd89dc6989ede57b9021990cb4a37 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 22 Feb 2010 04:16:10 +0000 Subject: [PATCH 099/268] - Documentation generation is no longer handled by SCons. The script doxybuild.py is used to generate the documentation on demand. - Added file 'version' that contains jsoncpp version number. It is used by both SConstruct and doxybuild.py. - Updated README.txt with documentation build instruction, and instructions to add a test case. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@99 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 74 +- trunk/jsoncpp/SConstruct | 77 +- trunk/jsoncpp/doc/doxyfile.in | 1364 +++++++++++++++++++++++++- trunk/jsoncpp/doc/sconscript | 61 -- trunk/jsoncpp/doxybuild.py | 191 ++++ trunk/jsoncpp/scons-tools/doxygen.py | 116 --- trunk/jsoncpp/version | 1 + 7 files changed, 1600 insertions(+), 284 deletions(-) delete mode 100644 trunk/jsoncpp/doc/sconscript create mode 100644 trunk/jsoncpp/doxybuild.py delete mode 100644 trunk/jsoncpp/scons-tools/doxygen.py create mode 100644 trunk/jsoncpp/version diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index c1b2c73..ed7ef8f 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -1,10 +1,11 @@ * Introduction: + ============= JSON (JavaScript Object Notation) is a lightweight data-interchange format. It can represent integer, real number, string, an ordered sequence of value, and a collection of name/value pairs. -JsonCpp is a simple API to manipulate JSON value, and handle serialization +JsonCpp is a simple API to manipulate JSON value, handle serialization and unserialization to string. It can also preserve existing comment in unserialization/serialization steps, @@ -12,7 +13,9 @@ making it a convenient format to store user input files. Unserialization parsing is user friendly and provides precise error reports. + * Building/Testing: + ================= JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires python to be installed (http://www.python.org). @@ -39,15 +42,76 @@ to do so. and TARGET may be: check: build library and run unit tests. - doc: build documentation - doc-dist: build documentation tarball -To run the test manually: + +* Running the test manually: + ========================== + cd test # This will run the Reader/Writer tests python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + # This will run the unit tests (mostly Value) python rununittests.py "path to test_lib_json.exe" -You can run the tests using valgrind using: +You can run the tests using valgrind: python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. 
+See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index 00d3741..0499db9 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -1,79 +1,17 @@ """ -Build system can be clean-up by sticking to a few core production factory, with automatic dependencies resolution. -4 basic project productions: -- library -- binary -- documentation -- tests +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. -* Library: - Input: - - dependencies (other libraries) - - headers: include path & files - - sources - - generated sources - - resources - - generated resources - Production: - - Static library - - Dynamic library - - Naming rule - Life-cycle: - - Library compilation - - Compilation as a dependencies - - Run-time - - Packaging - Identity: - - Name - - Version -* Binary: - Input: - - dependencies (other libraries) - - headers: include path & files (usually empty) - - sources - - generated sources - - resources - - generated resources - - supported variant (optimized/debug, dll/static...) - Production: - - Binary executable - - Manifest [on some platforms] - - Debug symbol [on some platforms] - Life-cycle: - - Compilation - - Run-time - - Packaging - Identity: - - Name - - Version -* Documentation: - Input: - - dependencies (libraries, binaries) - - additional sources - - generated sources - - resources - - generated resources - - supported variant (public/internal) - Production: - - HTML documentation - - PDF documentation - - CHM documentation - Life-cycle: - - Documentation - - Packaging - - Test - Identity: - - Name - - Version +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. """ - - import os import os.path import sys -JSONCPP_VERSION = '0.2' +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() DIST_DIR = '#dist' options = Variables() @@ -174,8 +112,6 @@ else: print "UNSUPPORTED PLATFORM." 
env.Exit(1) -env.Tool('doxygen') -env.Tool('substinfile') env.Tool('targz') env.Tool('srcdist') env.Tool('globtool') @@ -295,6 +231,5 @@ env.Alias( 'src-dist', srcdist_cmd ) buildProjectInDirectory( 'src/jsontestrunner' ) buildProjectInDirectory( 'src/lib_json' ) buildProjectInDirectory( 'src/test_lib_json' ) -buildProjectInDirectory( 'doc' ) #print env.Dump() diff --git a/trunk/jsoncpp/doc/doxyfile.in b/trunk/jsoncpp/doc/doxyfile.in index f19f037..b170b0f 100644 --- a/trunk/jsoncpp/doc/doxyfile.in +++ b/trunk/jsoncpp/doc/doxyfile.in @@ -1,16 +1,91 @@ -# Doxyfile 1.4.3 +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- -PROJECT_NAME = "JsonCpp" -PROJECT_NUMBER = %JSONCPP_VERSION% + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "CppUnit 2" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %CPPUNIT_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. 
+ OUTPUT_LANGUAGE = English -USE_WINDOWS_ENCODING = NO + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + ABBREVIATE_BRIEF = "The $name class" \ "The $name widget" \ "The $name file" \ @@ -22,211 +97,1438 @@ ABBREVIATE_BRIEF = "The $name class" \ a \ an \ the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + SHORT_NAMES = NO -JAVADOC_AUTOBRIEF = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. 
If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + MULTILINE_CPP_IS_BRIEF = NO -DETAILS_AT_TOP = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + INHERIT_DOCS = YES -DISTRIBUTE_GROUP_DOC = NO + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + TAB_SIZE = 3 -ALIASES = + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. 
+# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. 
For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. 
+ HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + SORT_BRIEF_DOCS = NO -SORT_BY_SCOPE_NAME = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + GENERATE_TODOLIST = YES -GENERATE_TESTLIST = YES -GENERATE_BUGLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. 
+ ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + #--------------------------------------------------------------------------- # configuration options related to warning and progress messages #--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. 
+ WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + WARN_FORMAT = "$file:$line: $text" -WARN_LOGFILE = jsoncpp-doxygen-warning.log + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + #--------------------------------------------------------------------------- # configuration options related to the input files #--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + INPUT = ../include ../src/lib_json . -FILE_PATTERNS = *.h *.cpp *.dox + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. 
+ EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + FILTER_SOURCE_FILES = NO + #--------------------------------------------------------------------------- # configuration options related to source browsing #--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. 
+ INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + VERBATIM_HEADERS = YES + #--------------------------------------------------------------------------- # configuration options related to the alphabetical class index #--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + IGNORE_PREFIX = + #--------------------------------------------------------------------------- # configuration options related to the HTML output #--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + GENERATE_HTML = YES -HTML_OUTPUT = json-html-doc-%JSONCPP_VERSION% + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. 
If it is left blank doxygen will generate a +# standard header. + HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + HTML_ALIGN_MEMBERS = YES -GENERATE_HTMLHELP = NO -CHM_FILE = jsoncpp.chm -HHC_LOCATION = -GENERATE_CHI = NO -BINARY_TOC = NO -TOC_EXPAND = NO + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. 
+ +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. 
Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + #--------------------------------------------------------------------------- # configuration options related to the LaTeX output #--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. 
This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + #--------------------------------------------------------------------------- # configuration options related to the RTF output #--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + RTF_EXTENSIONS_FILE = + #--------------------------------------------------------------------------- # configuration options related to the man page output #--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. 
+ MAN_LINKS = NO + #--------------------------------------------------------------------------- # configuration options related to the XML output #--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + XML_PROGRAMLISTING = YES + #--------------------------------------------------------------------------- # configuration options for the AutoGen Definitions output #--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + GENERATE_AUTOGEN_DEF = NO + #--------------------------------------------------------------------------- # configuration options related to the Perl module output #--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + PERLMOD_MAKEVAR_PREFIX = + #--------------------------------------------------------------------------- # Configuration options related to the preprocessor #--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. 
+ ENABLE_PREPROCESSING = YES -MACRO_EXPANSION = NO + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + INCLUDE_FILE_PATTERNS = *.h -PREDEFINED = JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + SKIP_FUNCTION_MACROS = YES + #--------------------------------------------------------------------------- # Configuration::additions related to external references #--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. 
+ TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + PERL_PATH = /usr/bin/perl + #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + CLASS_DIAGRAMS = NO -HIDE_UNDOC_RELATIONS = YES -HAVE_DOT = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. 
Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + GROUP_GRAPHS = YES -UML_LOOK = NO -TEMPLATE_RELATIONS = NO + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + DOT_IMAGE_FORMAT = png -DOT_PATH = + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + DOTFILE_DIRS = -MAX_DOT_GRAPH_WIDTH = 1024 -MAX_DOT_GRAPH_HEIGHT = 1024 + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. 
If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + DOT_TRANSPARENT = NO -DOT_MULTI_TARGETS = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + DOT_CLEANUP = YES + #--------------------------------------------------------------------------- -# Configuration::additions related to the search engine +# Options related to the search engine #--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. 
+ SEARCHENGINE = NO diff --git a/trunk/jsoncpp/doc/sconscript b/trunk/jsoncpp/doc/sconscript deleted file mode 100644 index 7b78a21..0000000 --- a/trunk/jsoncpp/doc/sconscript +++ /dev/null @@ -1,61 +0,0 @@ -Import( 'env' ) -import os.path - -if 'doxygen' in env['TOOLS']: - doc_topdir = str(env['ROOTBUILD_DIR']) - html_dir = 'jsoncpp-api-doc' - - doxygen_inputs = env.Glob( includes = '*.dox', dir = '#doc' ) \ - + env.Glob( includes = '*.h', dir = '#include/json/' ) \ - + env.Glob( includes = ('*.dox','*.h','*.inl','*.cpp'), - dir = '#src/lib_json' ) -## for p in doxygen_inputs: -## print p.abspath - - top_dir = env.Dir('#').abspath - include_top_dir = env.Dir('#include').abspath - env['DOXYFILE_DICT'] = { 'PROJECT_NAME': 'JsonCpp', - 'PROJECT_NUMBER': env['JSONCPP_VERSION'], - 'STRIP_FROM_PATH': top_dir, - 'STRIP_FROM_INC_PATH': include_top_dir, - 'HTML_OUTPUT': html_dir, - 'HTML_HEADER': env.File('#doc/header.html').abspath, - 'HTML_FOOTER': env.File('#doc/footer.html').abspath, - 'INCLUDE_PATH': include_top_dir, - 'PREDEFINED': 'JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSON_VALUE_USE_INTERNAL_MAP' - } - env['DOXYFILE_FILE'] = 'doxyfile.in' - doxfile_nodes = env.Doxyfile( os.path.join( doc_topdir, 'doxyfile' ), doxygen_inputs ) - html_doc_path = os.path.join( doc_topdir, html_dir ) - doc_nodes = env.Doxygen( source = doxfile_nodes, - target = os.path.join( html_doc_path, 'index.html' ) ) - alias_doc_cmd = env.Alias('doc', doc_nodes ) - env.Alias('doc', env.Install( html_doc_path, '#README.txt' ) ) - if 'TarGz' in env['BUILDERS']: - targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % html_dir ) - zip_doc_cmd = env.TarGz( targz_path, [env.Dir(html_doc_path)], - TARGZ_BASEDIR = env['ROOTBUILD_DIR'] ) - env.Depends( zip_doc_cmd, alias_doc_cmd ) - env.Alias( 'doc-dist', zip_doc_cmd ) -## -## doxyfile = env.SubstInFile( '#doc/doxyfile', 'doxyfile.in', -## SUBST_DICT = { -## '%JSONCPP_VERSION%' : env['JSONCPP_VERSION'], -## '%TOPDIR%' : env.Dir('#').abspath, -## '%DOC_TOPDIR%' : str(doc_topdir) } ) -## doc_cmd = env.Doxygen( doxyfile ) -## alias_doc_cmd = env.Alias('doc', doc_cmd ) -## env.AlwaysBuild(alias_doc_cmd) -## -## for dir in doc_cmd: -## env.Alias('doc', env.Install( '#' + dir.path, '#README.txt' ) ) -## filename = os.path.split(dir.path)[1] -## targz_path = os.path.join( env['DIST_DIR'], '%s.tar.gz' % filename ) -## zip_doc_cmd = env.TarGz( targz_path, [env.Dir(dir)], -## TARGZ_BASEDIR = doc_topdir ) -## env.Depends( zip_doc_cmd, alias_doc_cmd ) -## env.Alias( 'doc-dist', zip_doc_cmd ) -## -## # When doxyfile gets updated, I get errors on the first pass. -## # I have to run scons twice. Something is wrong with the dependencies -## # here, but I avoid it by running "scons doc/doxyfile" first. diff --git a/trunk/jsoncpp/doxybuild.py b/trunk/jsoncpp/doxybuild.py new file mode 100644 index 0000000..445de4b --- /dev/null +++ b/trunk/jsoncpp/doxybuild.py @@ -0,0 +1,191 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. 
Set to '' + to make them children of the root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + tar.close() + + +def find_program(filename): + """Searches the directories listed in the PATH environment variable for a program. + @param filename: name of the program to search for + @return: full path of the first occurrence of filename, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for name in [filename+ext for ext in suffixes.split(' ')]: # ''.split(' ') == [''], so the bare name is still tried on non-Windows platforms + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + try: + subprocess.check_call( cmd ) + except subprocess.CalledProcessError: + return False + return True + finally: + os.chdir( old_cwd ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionally makes a tarball of the documentation to dist/. + + Must be started in the project top directory.
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc' ) + print open(os.path.join('doc', warning_log_path), 'rb').read() + if not ok: + print 'Doxygen generation failed' + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +if __name__ == '__main__': + main() diff --git a/trunk/jsoncpp/scons-tools/doxygen.py b/trunk/jsoncpp/scons-tools/doxygen.py deleted file mode 100644 index 5ace420..0000000 --- a/trunk/jsoncpp/scons-tools/doxygen.py +++ /dev/null @@ -1,116 +0,0 @@ -# Big issue: -# emitter depends on doxyfile which is generated from doxyfile.in. 
-# build fails after cleaning and relaunching the build. - -# Todo: -# Add helper function to environment like for glob -# Easier passage of header/footer -# Automatic deduction of index.html path based on custom parameters passed to doxyfile - -import os -import os.path -from fnmatch import fnmatch -import SCons - -def Doxyfile_emitter(target, source, env): - """ - Modify the target and source lists to use the defaults if nothing - else has been specified. - - Dependencies on external HTML documentation references are also - appended to the source list. - """ - doxyfile_template = env.File(env['DOXYFILE_FILE']) - source.insert(0, doxyfile_template) - - return target, source - -def Doxyfile_Builder(target, source, env): - """Input: - DOXYFILE_FILE - Path of the template file for the output doxyfile - - DOXYFILE_DICT - A dictionnary of parameter to append to the generated doxyfile - """ - subdir = os.path.split(source[0].abspath)[0] - doc_top_dir = os.path.split(target[0].abspath)[0] - doxyfile_path = source[0].abspath - doxy_file = file( target[0].abspath, 'wt' ) - try: - # First, output the template file - try: - f = file(doxyfile_path, 'rt') - doxy_file.write( f.read() ) - f.close() - doxy_file.write( '\n' ) - doxy_file.write( '# Generated content:\n' ) - except: - raise SCons.Errors.UserError, "Can't read doxygen template file '%s'" % doxyfile_path - # Then, the input files - doxy_file.write( 'INPUT = \\\n' ) - for source in source: - if source.abspath != doxyfile_path: # skip doxyfile path, which is the first source - doxy_file.write( '"%s" \\\n' % source.abspath ) - doxy_file.write( '\n' ) - # Dot... - values_dict = { 'HAVE_DOT': env.get('DOT') and 'YES' or 'NO', - 'DOT_PATH': env.get('DOT') and os.path.split(env['DOT'])[0] or '', - 'OUTPUT_DIRECTORY': doc_top_dir, - 'WARN_LOGFILE': target[0].abspath + '-warning.log'} - values_dict.update( env['DOXYFILE_DICT'] ) - # Finally, output user dictionary values which override any of the previously set parameters. - for key, value in values_dict.iteritems(): - doxy_file.write ('%s = "%s"\n' % (key, str(value))) - finally: - doxy_file.close() - -def generate(env): - """ - Add builders and construction variables for the - Doxygen tool. - """ - ## Doxyfile builder - def doxyfile_message (target, source, env): - return "creating Doxygen config file '%s'" % target[0] - - doxyfile_variables = [ - 'DOXYFILE_DICT', - 'DOXYFILE_FILE' - ] - - #doxyfile_action = SCons.Action.Action( Doxyfile_Builder, doxyfile_message, - # doxyfile_variables ) - doxyfile_action = SCons.Action.Action( Doxyfile_Builder, doxyfile_message) - - doxyfile_builder = SCons.Builder.Builder( action = doxyfile_action, - emitter = Doxyfile_emitter ) - - env['BUILDERS']['Doxyfile'] = doxyfile_builder - env['DOXYFILE_DICT'] = {} - env['DOXYFILE_FILE'] = 'doxyfile.in' - - ## Doxygen builder - def Doxygen_emitter(target, source, env): - output_dir = str( source[0].dir ) - if str(target[0]) == str(source[0]): - target = env.File( os.path.join( output_dir, 'html', 'index.html' ) ) - return target, source - - doxygen_action = SCons.Action.Action( [ '$DOXYGEN_COM'] ) - doxygen_builder = SCons.Builder.Builder( action = doxygen_action, - emitter = Doxygen_emitter ) - env['BUILDERS']['Doxygen'] = doxygen_builder - env['DOXYGEN_COM'] = '$DOXYGEN $DOXYGEN_FLAGS $SOURCE' - env['DOXYGEN_FLAGS'] = '' - env['DOXYGEN'] = 'doxygen' - - dot_path = env.WhereIs("dot") - if dot_path: - env['DOT'] = dot_path - -def exists(env): - """ - Make sure doxygen exists. 
- """ - return env.Detect("doxygen") diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version new file mode 100644 index 0000000..ab67193 --- /dev/null +++ b/trunk/jsoncpp/version @@ -0,0 +1 @@ +0.5.0-rc \ No newline at end of file From e7b4846912206f82ae558017a49e3be86871c864 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 22 Feb 2010 04:37:31 +0000 Subject: [PATCH 100/268] - fixed project name and version git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@100 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/doxyfile.in | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/doc/doxyfile.in b/trunk/jsoncpp/doc/doxyfile.in index b170b0f..48861d2 100644 --- a/trunk/jsoncpp/doc/doxyfile.in +++ b/trunk/jsoncpp/doc/doxyfile.in @@ -25,13 +25,13 @@ DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded # by quotes) that should identify the project. -PROJECT_NAME = "CppUnit 2" +PROJECT_NAME = "JsonCpp" # The PROJECT_NUMBER tag can be used to enter a project or revision number. # This could be handy for archiving the generated documentation or # if some version control system is used. -PROJECT_NUMBER = %CPPUNIT_VERSION% +PROJECT_NUMBER = %JSONCPP_VERSION% # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) # base path where the generated documentation will be put. From ec9b23568480353b8d3cd281204de2c47d83ee33 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 03:23:09 +0000 Subject: [PATCH 101/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@101 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 200 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 
++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + .../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + 
.../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 167 files changed, 13868 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h 
create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new 
file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of +values, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON values, handling serialization +and deserialization to and from strings. + +It can also preserve existing comments across deserialization/serialization steps, +making it a convenient format to store user input files. + +Parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You can download the scons-local distribution from the following URL: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Adding a platform is fairly simple. You need to change the SConstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using the JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file that contains the input document in JSON format. +- a TESTNAME.expected file that contains a flattened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represents the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element paths; a small illustrative pair is sketched below.
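For illustration, a minimal pair in this format might look as follows. This is only a sketch: test_example_01 is a made-up name, not a file shipped in test/data, and the lines simply follow the rules above rather than reproduce actual jsontest.exe output.

test_example_01.json:

{ "count" : 3, "items" : [ "a", "b" ] }

test_example_01.expected:

.={}
.count=3
.items=[]
.items[0]="a"
.items[1]="b"

The root object appears first as .={}, each object member is reached through the '.' separator, and each array element through its [N] index.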
+ + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated alongside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was correct. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWriter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +- test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing errors. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name to the options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure the build directory exists (SConsignFile fails otherwise!)
+if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. 
+ +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. 
Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. 
+ +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. 
+ +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. 
+ +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. 
+ +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. 
Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. 
+ +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. 
+ +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). 
+ +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. 
Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. 
This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. 
+ +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. 
+ +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. 
+ +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. 
+ +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. 
Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ [HTML markup was not preserved in this export; the footer's visible text follows]
+ SourceForge Logo
+ hosts this site.
+ Send comments to:
+ Json-cpp Developers
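The ALIASES and JAVADOC_AUTOBRIEF settings earlier in this doxyfile define custom commands such as \json_ref and turn the first sentence of a comment into its brief description. A minimal sketch of how a documented declaration might use them; the function name and signature are purely illustrative and are not part of this patch:

#include <string>

namespace Json {

class Value;   // declared in include/json/json_value.h

/** Parses a \json_ref document into \a root. Returns false if the
 *  document is malformed.
 *
 *  With JAVADOC_AUTOBRIEF = YES the first sentence (up to the first dot)
 *  becomes the brief description, and \json_ref expands to the text
 *  configured in the ALIASES tag of the doxyfile above.
 */
bool parseDocument( const std::string &document, Value &root );

} // namespace Json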
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html
new file mode 100644
index 0000000..d56ea59
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/header.html
@@ -0,0 +1,24 @@
+ [HTML markup was not preserved in this export; the header's visible text follows]
+ JsonCpp - JSON data format manipulation library
+ JsonCpp project page
+ JsonCpp home page
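The ENABLE_PREPROCESSING, MACRO_EXPANSION and PREDEFINED settings further up in the doxyfile make doxygen treat macros such as JSON_VALUE_USE_INTERNAL_MAP and _MSC_VER=1400 as defined while it scans the headers. A hedged sketch of the effect; the class name below is invented for illustration only:

// Because the doxyfile predefines JSON_VALUE_USE_INTERNAL_MAP, doxygen
// evaluates this conditional block as true and documents the declarations
// inside it, even if a given compiler configuration would skip them.
#ifdef JSON_VALUE_USE_INTERNAL_MAP
class ValueInternalMapSketch;   // illustrative name, not from the patch
#endif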
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation) is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, ordered sequences of values, and
+collections of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+   ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite a JSON document, preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+   // Report the failure and its location in the document to the user.
+   std::cout << "Failed to parse configuration\n"
+             << reader.getFormatedErrorMessages();
+   return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )   // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, to build the new configuration document:
+// Since Json::Value has implicit constructors for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
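The usage example in jsoncpp.dox above shows the read path. A minimal, self-contained sketch of the complementary write-then-reparse path, assuming the include/json layout added by this patch and using only the calls that the example itself names (Json::Value, Json::StyledWriter::write, Json::Reader::parse, getFormatedErrorMessages); it is an illustration, not part of the patch:

#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   Json::Value root;
   root["encoding"] = "UTF-8";       // implicit construction from const char*
   root["indent"]["length"] = 3;     // nested objects are created on demand
   root["indent"]["use_space"] = true;

   Json::StyledWriter writer;
   std::string document = writer.write( root );   // human-readable, indented output
   std::cout << document;

   // Reading the document back, as in the jsoncpp.dox example:
   Json::Reader reader;
   Json::Value reparsed;
   if ( !reader.parse( document, reparsed ) )
   {
      std::cout << reader.getFormatedErrorMessages();
      return 1;
   }
   return 0;
}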
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library build
+    - Enhance help
+  - Platform portability check: (Notes: was ok on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as a numeric constant for use in preprocessor tests
+  - Remove buggy experimental hash stuff
+  - Release on sourceforge download
+  \section ms_strict Add a strict mode to the reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get jsonchecker failing tests to pass in strict mode
+  \section ms_separation Expose JSON reader/writer APIs that do not impose using Json::Value.
+  Some typical use-cases involve converting an application-specific structure to/from a JSON document.
+  - Event-based parser to allow deserializing a JSON document directly into an application
+    data structure instead of going through the intermediate Json::Value.
+  - "Stream"-based writer to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provide an event-based parser. It should allow pulling & skipping events for ease of use.
+    - Provide a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property name definitions, avoiding allocation
+  - A static property dictionary can be provided to the JSON reader
+  - Performance scenarios & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..0c48763
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,200 @@
+"""Script to generate doxygen documentation.
+"""
+
+import re
+import os
+import os.path
+import sys
+import shutil
+import gzip
+import tarfile
+
+TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
+
+def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
+    """Parameters:
+    tarball_path: output path of the .tar.gz file
+    sources: list of sources to include in the tarball, relative to the current directory
+    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
+        from its path in the tarball.
+    prefix_dir: all files are stored in the tarball as children of prefix_dir. Set to ''
+        to place them at the root.
+ """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + tar.close() + + +def find_program(filename): + """find a program in folders path_lst, and sets env[var] + @param env: environmentA + @param filename: name of the program to search for + @param path_list: list of directories to search for filename + @param var: environment value to be checked for in env or os.environ + @return: either the value that is referenced with [var] in env or os.environ + or the first occurrence filename or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + try: + subprocess.check_call( cmd ) + except subprocess.CalledProcessError: + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc' ) + print open(os.path.join('doc', warning_log_path), 'rb').read() + if not ok: + print 'Doxygen generation failed' + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
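+      // Illustrative note (not in the original header): with itemsPerPage == 8, the look-up
+      // described above places item index 21 at pages_[21 / 8][21 % 8], i.e. pages_[2][5].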
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..1aa5978 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. 
+ """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
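An illustrative sketch (not part of the patch) of the usage contract stated in the BatchAllocator class comment above: allocate() hands back raw storage, so the caller constructs with placement new and later destroys and releases explicitly. The template arguments (element type, objects per allocation) and the umbrella include are assumptions, since the angle-bracketed parts of the header are not legible in this import.

    #include <new>                      // placement new
    #include <json/json.h>              // Json::Value (assumed umbrella header)
    #include "json_batchallocator.h"    // private header from src/lib_json

    void batchAllocatorSketch()
    {
       // One Value per allocation; page size left at the 255-object default.
       Json::BatchAllocator<Json::Value, 1> allocator;

       // Construct in the returned slot with placement new, as the comment requires.
       Json::Value *value = new ( allocator.allocate() ) Json::Value( 42 );

       value->~Value();            // caller destroys the object itself...
       allocator.release( value ); // ...and returns the slot to the free list
    }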
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
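// [Illustrative note, not part of the patch] A worked breakdown of the
// "(16+4) * 6 + 4 = 124" figure in the ValueInternalMap comment above,
// assuming the 32-bit targets of the era (sizeof(Value) == 16, 4-byte pointers):
//   6 Value slots      : 6 * 16 = 96 bytes
//   6 char* key slots  : 6 *  4 = 24 bytes
//   1 link pointer     :           4 bytes
//   total per link     :         124 bytes
// ValueInternalLink as declared also carries a previous_ pointer, so this reads
// as an estimate of the per-link payload rather than an exact sizeof.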
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
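A minimal usage sketch (not part of the patch) showing how the Reader implemented below is typically driven together with Features and StyledWriter; the JSON text and the choice of strict mode are illustrative only.

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main()
    {
       // strictMode() disallows comments and requires an array or object root,
       // matching the Features semantics defined above.
       Json::Reader reader( Json::Features::strictMode() );
       Json::Value root;
       const std::string document = "{ \"name\": \"json\", \"entries\": [ 1, 2, 3 ] }";

       if ( !reader.parse( document, root ) )
       {
          std::cerr << reader.getFormatedErrorMessages();
          return 1;
       }

       Json::StyledWriter writer;
       std::cout << writer.write( root );   // pretty-printed round trip
       return 0;
    }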
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
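+   // Illustrative note (usage sketch, based on the operators defined in this
+   // file): arrays grow on demand, either through resize() or simply by
+   // indexing past the end with operator[]( UInt ):
+   //
+   //    Json::Value a;          // nullValue
+   //    a[2u] = "third";        // a becomes an arrayValue; size() == 3
+   //    a.append( 42 );         // size() == 4; indices 0 and 1 still read as null
+   //
+   // Shrinking with resize() erases the trailing indices again.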
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
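+            // Illustrative sketch of the intended Path usage (assumes the
+            // trailing PathArgument parameters of the constructor are
+            // defaulted in the header):
+            //
+            //    Json::Path path( ".settings.users[%].name", 2u );
+            //    Json::Value name = path.resolve( root, "unknown" );
+            //
+            // '.' separates object keys, "[0]"-style suffixes index arrays,
+            // and '%' (or "[%]") substitutes the next PathArgument passed to
+            // the constructor.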
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : "";
+#else
+   if ( !isArray_ )
+      return ValueInternalMap::key( iterator_.map_ );
+   return "";
+#endif
+}
+
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueConstIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueConstIterator::ValueConstIterator()
+{
+}
+
+
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator &current )
+   : ValueIteratorBase( current )
+{
+}
+#else
+ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+
+ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+#endif
+
+ValueConstIterator &
+ValueConstIterator::operator =( const ValueIteratorBase &other )
+{
+   copy( other );
+   return *this;
+}
+
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueIterator::ValueIterator()
+{
+}
+
+
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+ValueIterator::ValueIterator( const Value::ObjectValues::iterator &current )
+   : ValueIteratorBase( current )
+{
+}
+#else
+ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+
+ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+#endif
+
+ValueIterator::ValueIterator( const ValueConstIterator &other )
+   : ValueIteratorBase( other )
+{
+}
+
+ValueIterator::ValueIterator( const ValueIterator &other )
+   : ValueIteratorBase( other )
+{
+}
+
+ValueIterator &
+ValueIterator::operator =( const SelfType &other )
+{
+   copy( other );
+   return *this;
+}
diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp
new file mode 100644
index 0000000..cdf4188
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp
@@ -0,0 +1,829 @@
+#include <json/writer.h>
+#include <utility>
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+#include <iostream>
+#include <sstream>
+#include <iomanip>
+
+#if _MSC_VER >= 1400 // VC++ 8.0
+#pragma warning( disable : 4996 )   // disable warning about strdup being deprecated.
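+// Illustrative overview (usage sketch based on the classes defined below) of
+// the two string-producing writers in this file:
+//
+//    Json::FastWriter fast;
+//    std::string compact = fast.write( root );   // single line plus trailing
+//                                                // newline, e.g. {"age":42}
+//
+//    Json::StyledWriter styled;
+//    std::string pretty = styled.write( root );  // indented, with any comments
+//                                                // re-emitted around the values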
+#endif
+
+namespace Json {
+
+static bool isControlCharacter(char ch)
+{
+   return ch > 0 && ch <= 0x1F;
+}
+
+static bool containsControlCharacter( const char* str )
+{
+   while ( *str )
+   {
+      if ( isControlCharacter( *(str++) ) )
+         return true;
+   }
+   return false;
+}
+static void uintToString( unsigned int value,
+                          char *&current )
+{
+   *--current = 0;
+   do
+   {
+      *--current = (value % 10) + '0';
+      value /= 10;
+   }
+   while ( value != 0 );
+}
+
+std::string valueToString( Int value )
+{
+   char buffer[32];
+   char *current = buffer + sizeof(buffer);
+   bool isNegative = value < 0;
+   if ( isNegative )
+      value = -value;
+   uintToString( UInt(value), current );
+   if ( isNegative )
+      *--current = '-';
+   assert( current >= buffer );
+   return current;
+}
+
+
+std::string valueToString( UInt value )
+{
+   char buffer[32];
+   char *current = buffer + sizeof(buffer);
+   uintToString( value, current );
+   assert( current >= buffer );
+   return current;
+}
+
+std::string valueToString( double value )
+{
+   char buffer[32];
+#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning.
+   sprintf_s(buffer, sizeof(buffer), "%#.16g", value);
+#else
+   sprintf(buffer, "%#.16g", value);
+#endif
+   char* ch = buffer + strlen(buffer) - 1;
+   if (*ch != '0') return buffer; // nothing to truncate, so save time
+   while(ch > buffer && *ch == '0'){
+     --ch;
+   }
+   char* last_nonzero = ch;
+   while(ch >= buffer){
+     switch(*ch){
+       case '0':
+       case '1':
+       case '2':
+       case '3':
+       case '4':
+       case '5':
+       case '6':
+       case '7':
+       case '8':
+       case '9':
+         --ch;
+         continue;
+       case '.':
+         // Truncate zeroes to save bytes in output, but keep one.
+         *(last_nonzero+2) = '\0';
+         return buffer;
+       default:
+         return buffer;
+     }
+   }
+   return buffer;
+}
+
+
+std::string valueToString( bool value )
+{
+   return value ? "true" : "false";
+}
+
+std::string valueToQuotedString( const char *value )
+{
+   // Not sure how to handle unicode...
+   if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value ))
+      return std::string("\"") + value + "\"";
+   // We have to walk value and escape any special characters.
+   // Appending to std::string is not efficient, but this should be rare.
+   // (Note: forward slashes are *not* rare, but I am not escaping them.)
+   unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL
+   std::string result;
+   result.reserve(maxsize); // to avoid lots of mallocs
+   result += "\"";
+   for (const char* c=value; *c != 0; ++c)
+   {
+      switch(*c)
+      {
+         case '\"':
+            result += "\\\"";
+            break;
+         case '\\':
+            result += "\\\\";
+            break;
+         case '\b':
+            result += "\\b";
+            break;
+         case '\f':
+            result += "\\f";
+            break;
+         case '\n':
+            result += "\\n";
+            break;
+         case '\r':
+            result += "\\r";
+            break;
+         case '\t':
+            result += "\\t";
+            break;
+         //case '/':
+            // Even though \/ is considered a legal escape in JSON, a bare
+            // slash is also legal, so I see no reason to escape it.
+            // (I hope I am not misunderstanding something.
+            // blep notes: actually escaping \/ may be useful in javascript to avoid </
+            // sequence.
+            // Should add a flag to allow this compatibility mode and prevent this
+            // sequence from occurring.
+         default:
+            if ( isControlCharacter( *c ) )
+            {
+               std::ostringstream oss;
+               oss << "\\u" << std::hex << std::uppercase << std::setfill('0')
+                   << std::setw(4) << static_cast<int>(*c);
+               result += oss.str();
+            }
+            else
+            {
+               result += *c;
+            }
+            break;
+      }
+   }
+   result += "\"";
+   return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer()
+{
+}
+
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+   : yamlCompatiblityEnabled_( false )
+{
+}
+
+
+void
+FastWriter::enableYAMLCompatibility()
+{
+   yamlCompatiblityEnabled_ = true;
+}
+
+
+std::string
+FastWriter::write( const Value &root )
+{
+   document_ = "";
+   writeValue( root );
+   document_ += "\n";
+   return document_;
+}
+
+
+void
+FastWriter::writeValue( const Value &value )
+{
+   switch ( value.type() )
+   {
+   case nullValue:
+      document_ += "null";
+      break;
+   case intValue:
+      document_ += valueToString( value.asInt() );
+      break;
+   case uintValue:
+      document_ += valueToString( value.asUInt() );
+      break;
+   case realValue:
+      document_ += valueToString( value.asDouble() );
+      break;
+   case stringValue:
+      document_ += valueToQuotedString( value.asCString() );
+      break;
+   case booleanValue:
+      document_ += valueToString( value.asBool() );
+      break;
+   case arrayValue:
+      {
+         document_ += "[";
+         int size = value.size();
+         for ( int index =0; index < size; ++index )
+         {
+            if ( index > 0 )
+               document_ += ",";
+            writeValue( value[index] );
+         }
+         document_ += "]";
+      }
+      break;
+   case objectValue:
+      {
+         Value::Members members( value.getMemberNames() );
+         document_ += "{";
+         for ( Value::Members::iterator it = members.begin();
+               it != members.end();
+               ++it )
+         {
+            const std::string &name = *it;
+            if ( it != members.begin() )
+               document_ += ",";
+            document_ += valueToQuotedString( name.c_str() );
+            document_ += yamlCompatiblityEnabled_ ?
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
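+ // (failures_ is a std::deque, so keeping the address of back() is safe:
+ // push_back() never invalidates references to existing elements.)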
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..325b1d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
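+ /// The recorded Failure also captures the active PredicateContext stack and
+ /// becomes the target of messages appended afterwards with operator <<.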
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
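+/// Non-aborting: on failure it records the failure through TestResult::addFailure()
+/// and execution continues; extra context may be appended with operator <<, e.g.: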
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..9864178 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
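+// Editor's illustrative sketch (not part of the original import): the two TODO
+// items above can be observed directly on Json::Value, as exercised by the
+// fixtures below:
+//
+//   Json::Value flag( true );
+//   flag.isIntegral();    // currently true (see the isBool fixture); the TODO says it should not be
+//
+//   Json::Value small( Json::Value::UInt( Json::Value::maxInt ) );
+//   small.isInt();        // currently false even though the value fits in an Int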
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..84f56b6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
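+# Depends() adds an explicit dependency on the top-level 'libs' output
+# ('#' denotes the SConstruct top directory), so it is built before this target.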
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
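
test_preserve_comment_01.json above exercises the reader's ability to keep C and C++ style comments attached to the values they annotate. A rough sketch of how that could be queried after parsing follows; it assumes the collectComments flag on Reader::parse and the hasComment/getComment accessors on Value, so treat the exact names as an assumption rather than a guarantee for this release.

    #include <json/json.h>
    #include <fstream>
    #include <iostream>
    #include <iterator>
    #include <string>

    int main()
    {
        std::ifstream in("test_preserve_comment_01.json");
        std::string doc((std::istreambuf_iterator<char>(in)),
                        std::istreambuf_iterator<char>());

        Json::Value root;
        Json::Reader reader;
        if (!reader.parse(doc, root, true))   // true: collect comments
            return 1;

        // Comment placed on the same line as "first" : 1
        if (root["first"].hasComment(Json::commentAfterOnSameLine))
            std::cout << root["first"].getComment(Json::commentAfterOnSameLine) << "\n";

        // Block comment written just before "second" : 2
        if (root["second"].hasComment(Json::commentBefore))
            std::cout << root["second"].getComment(Json::commentBefore) << "\n";
        return 0;
    }
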
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
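
The test_integer_* and test_real_* fixtures above pin down where the reader stops using integer storage: values up to 2147483647 (2^31 - 1) fit the signed type, values up to 4294967295 (2^32 - 1) fit the unsigned type, and anything outside that range, such as 8589934592 (2^33) or -4294967295, falls back to double. A small check along those lines, assuming the 32-bit Int/UInt value model of this release, might look like:

    #include <json/json.h>
    #include <cassert>

    int main()
    {
        Json::Reader reader;
        Json::Value v;

        reader.parse("2147483647", v);   // 2^31 - 1: stored as Int
        assert(v.isInt());

        reader.parse("4294967295", v);   // 2^32 - 1: too big for Int, stored as UInt
        assert(v.isUInt());

        reader.parse("8589934592", v);   // 2^33: beyond UInt, stored as double
        assert(v.isDouble());

        reader.parse("-4294967295", v);  // below Int's minimum, stored as double
        assert(v.isDouble());
        return 0;
    }
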
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
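
The test_string_unicode_* fixtures above cover both escape forms: a plain BMP escape such as \u20AC decodes to a single code point, while test_string_unicode_04 uses the surrogate pair \uD834\uDD1E, which must be combined into the single code point U+1D11E before being written out as UTF-8. The combining step is plain arithmetic, sketched here outside any library code:

    #include <cassert>

    int main()
    {
        unsigned hi = 0xD834;   // high (lead) surrogate
        unsigned lo = 0xDD1E;   // low (trail) surrogate

        // Standard UTF-16 surrogate-pair decoding:
        unsigned codepoint = 0x10000 + ((hi - 0xD800) << 10) + (lo - 0xDC00);
        assert(codepoint == 0x1D11E);   // MUSICAL SYMBOL G CLEF

        // Encoded as UTF-8 this becomes the 4-byte sequence F0 9D 84 9E,
        // which is what the corresponding .expected file stores.
        return 0;
    }
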
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From cfde4cfdcc3d8c5c9c023e3e389e88d649f9b917 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 03:27:38 +0000 Subject: [PATCH 102/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@102 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 200 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - 
.../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - 
.../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 167 files changed, 13868 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 
100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 
100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. 
Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. 
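
The flattened *.expected format that the README above describes (one "path=value" line per JSON element, '.' for the root, object members joined with '.', and [N] for array indices) can be illustrated with a short standalone sketch. The snippet below is a minimal Python 3 reimplementation for illustration only: the repository's own pyjsontestrunner.py, shown earlier in this patch series, implements the same idea for Python 2, and the helper name flatten plus the sample document are assumptions of this sketch, not part of the test suite.

import json

def flatten(value, path='.', out=None):
    # Emits one "<path>=<value>" line per element; '.' is the root,
    # object members are joined with '.', and [N] indexes array elements.
    if out is None:
        out = []
    if isinstance(value, dict):
        out.append('%s={}' % path)            # object values are shown empty
        sep = '' if path.endswith('.') else '.'
        for name in sorted(value):
            flatten(value[name], path + sep + name, out)
    elif isinstance(value, list):
        out.append('%s=[]' % path)            # array values are shown empty
        for index, child in enumerate(value):
            flatten(child, '%s[%d]' % (path, index), out)
    elif isinstance(value, str):
        out.append('%s="%s"' % (path, value))
    elif isinstance(value, bool):             # checked before int: bool is an int subclass
        out.append('%s=%s' % (path, 'true' if value else 'false'))
    elif value is None:
        out.append('%s=null' % path)
    elif isinstance(value, int):
        out.append('%s=%d' % (path, value))
    elif isinstance(value, float):
        out.append('%s=%.16g' % (path, value))
    return out

if __name__ == '__main__':
    doc = json.loads('{"count": 1, "items": ["a", "b"]}')
    print('\n'.join(flatten(doc)))
    # .={}
    # .count=1
    # .items=[]
    # .items[0]="a"
    # .items[1]="b"

Members are visited in sorted order so the output is deterministic and can be compared line by line, which mirrors how runjsontests.py diffs the .expected file against the .actual and .actual-rewrite files produced by jsontest.
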
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. 
- -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. 
- -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". 
-# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. 
If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. 
When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. 
- -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. 
Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. 
- -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. 
-# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. 
-# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. 
This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. 
-# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. 
- -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. 
- -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. 
- -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. 
- -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. 
- -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. 
- -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integers, real numbers, strings, an ordered sequence of values, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" : true } -} -\endverbatim - -\section _features Features -- read and write JSON documents -- rewrite JSON documents preserving the original comments - -\code -Json::Value root; // will contain the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and its location in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'plug-ins', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown, to make the new configuration document: -// Since Json::Value has an implicit constructor for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _pbuild Build instructions -The build instructions are located in the file -README.txt in the top directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in the Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org).
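The \code fragment in the removed jsoncpp.dox above is pulled from a larger program. Purely as an illustration of the same reader/writer calls documented there, the following is a minimal, self-contained sketch: it assumes the umbrella header <json/json.h> and linking against lib_json, and it is not an official project example.

   // Minimal sketch, assuming <json/json.h> and the lib_json library are available.
   #include <json/json.h>
   #include <iostream>
   #include <string>

   int main()
   {
      // Same kind of configuration document as in the \mainpage example above.
      const std::string config_doc =
         "{ \"encoding\" : \"UTF-8\", \"indent\" : { \"length\" : 3, \"use_space\" : true } }";

      Json::Value root;     // will contain the root value after parsing
      Json::Reader reader;
      if ( !reader.parse( config_doc, root ) )
      {
         // getFormatedErrorMessages() is spelled as in the library's API at this revision.
         std::cerr << "Failed to parse configuration\n"
                   << reader.getFormatedErrorMessages();
         return 1;
      }

      // Read members with defaults, as shown in the documentation above.
      std::string encoding = root.get( "encoding", "UTF-8" ).asString();
      int indentLength = root["indent"].get( "length", 3 ).asInt();

      // Modify the document and write it back out.
      root["indent"]["use_space"] = false;
      Json::StyledWriter writer;
      std::cout << "encoding=" << encoding << ", indent length=" << indentLength << "\n"
                << writer.write( root );
      return 0;
   }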
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 0c48763..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,200 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. 
- """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - tar.close() - - -def find_program(filename): - """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - try: - subprocess.check_call( cmd ) - except subprocess.CalledProcessError: - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - - version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc' ) - print open(os.path.join('doc', warning_log_path), 'rb').read() - if not ok: - print 'Doxygen generation failed' - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 1aa5978..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. 
- """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
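The helpers above implement the test runner's round trip: parse the input file, dump a flattened path=value tree of the result, rewrite the tree with StyledWriter, then reparse the rewritten text. A minimal self-contained sketch of that flow follows; it assumes only the Reader and StyledWriter calls used above, plus Value equality via operator==, which is assumed to be available in this release.

```cpp
// Illustrative sketch of the test runner's parse -> rewrite -> reparse round trip.
// Assumes the 0.5.0-era API used above (Json::Reader, Json::StyledWriter).
#include <json/json.h>
#include <cstdio>
#include <string>

int main()
{
   const std::string input = "{ \"a\" : [ 1, 2 ], \"b\" : true }";

   Json::Reader reader;
   Json::Value root;
   if ( !reader.parse( input, root ) )
   {
      std::printf( "parse failed:\n%s\n", reader.getFormatedErrorMessages().c_str() );
      return 1;
   }

   // Rewrite with the styled writer, as rewriteValueTree() does.
   Json::StyledWriter writer;
   const std::string rewrite = writer.write( root );

   // Reparse the rewritten document; the two trees should compare equal.
   Json::Value rewriteRoot;
   if ( !reader.parse( rewrite, rewriteRoot ) )
      return 2;
   std::printf( "round trip %s\n", rewriteRoot == root ? "ok" : "mismatch" );
   return 0;
}
```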
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
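As the comment block above stresses, BatchAllocator hands out raw storage: the caller constructs objects with placement new on the pointer returned by allocate(), and either releases slots individually or simply destroys the allocator to reclaim every page at once. A rough usage sketch follows. The template parameter list was stripped by this export; it is assumed here to be template<typename AllocatedType, const unsigned int objectPerAllocation>, matching how the Value containers later in this patch use the allocator, and the internal header is treated as if it were on the include path.

```cpp
// Rough usage sketch for the batch allocator above (assumed template parameters:
// AllocatedType and objectPerAllocation; the export dropped the angle brackets).
#include "json_batchallocator.h"
#include <new>   // placement new

struct Node { double payload; };   // pointer-sized or larger, so the free-list assert holds

void example()
{
   Json::BatchAllocator<Node, 1> allocator( 255 );   // 255 objects per page

   Node *raw  = allocator.allocate();   // raw storage only...
   Node *node = new (raw) Node();       // ...so the caller runs the constructor

   node->~Node();                       // the caller also runs the destructor
   allocator.release( node );           // the slot goes back on the free list

   // Alternatively, let the allocator go out of scope: every page is freed at
   // once, which is the "destroy all the allocated objects at once" mode the
   // comment above describes.
}
```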
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
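resolveReference() and find() above show the core addressing scheme of ValueInternalArray: values live in fixed-size pages, and an array index maps to pages_[index / itemsPerPage][index % itemsPerPage]. The standalone illustration below mirrors that two-level addressing only; it is not the real class, and the page size of 8 is chosen purely for the demonstration.

```cpp
// Standalone illustration of the two-level addressing used by ValueInternalArray:
//   pages[index / itemsPerPage][index % itemsPerPage]
#include <cstdio>
#include <vector>

int main()
{
   const unsigned itemsPerPage = 8;                  // demo value, not the library's constant
   std::vector<std::vector<int> > pages;             // stand-in for Value **pages_

   // resolveReference(index): grow until 'index' is addressable, then return the slot.
   const unsigned index = 21;
   const unsigned pageIndex = index / itemsPerPage;  // -> page 2
   const unsigned itemIndex = index % itemsPerPage;  // -> slot 5 on that page

   if ( pages.size() <= pageIndex )
      pages.resize( pageIndex + 1, std::vector<int>( itemsPerPage, 0 ) );
   pages[pageIndex][itemIndex] = 42;

   std::printf( "index %u lives at page %u, slot %u\n", index, pageIndex, itemIndex );
   return 0;
}
```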
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
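Putting ValueInternalMap::find() and hash() above together: a key is hashed with a simple additive multiply-by-37 scheme, reduced modulo the bucket count, and the bucket's chain of ValueInternalLink nodes is walked, scanning up to itemPerLink (6) key/value slots per link until a match or an available slot ends the search. The model below mirrors that walk only; the Link struct and the "available slot" encoding are deliberately simplified and are not the real types.

```cpp
// Simplified model of the lookup walk used by ValueInternalMap::find() above.
#include <cstring>
#include <cstdio>

enum { itemPerLink = 6 };

struct Link
{
   const char *keys[itemPerLink];   // a null key marks an available (unused) slot here
   int         items[itemPerLink];
   Link       *next;
};

static unsigned hashKey( const char *key )
{
   unsigned hash = 0;
   while ( *key )                                   // additive *37 hash, as in hash() above
      hash += static_cast<unsigned>( *key++ ) * 37;
   return hash;
}

static const int *find( const Link *buckets, unsigned bucketCount, const char *key )
{
   if ( bucketCount == 0 )
      return 0;
   unsigned bucketIndex = hashKey( key ) % bucketCount;
   for ( const Link *link = &buckets[bucketIndex]; link != 0; link = link->next )
      for ( int i = 0; i < itemPerLink; ++i )
      {
         if ( link->keys[i] == 0 )                  // available slot: key is absent
            return 0;
         if ( std::strcmp( key, link->keys[i] ) == 0 )
            return &link->items[i];
      }
   return 0;
}

int main()
{
   Link bucket = { { "alpha", "beta", 0, 0, 0, 0 }, { 1, 2, 0, 0, 0, 0 }, 0 };
   const int *hit = find( &bucket, 1, "beta" );
   std::printf( "beta -> %d\n", hit ? *hit : -1 );   // prints: beta -> 2
   return 0;
}
```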
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
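codePointToUTF8() above follows the standard UTF-8 ranges: 1 byte up to U+007F, 2 bytes up to U+07FF, 3 bytes up to U+FFFF, and 4 bytes up to U+10FFFF. Below is a small worked check of the 3-byte case, re-deriving the bytes by hand rather than calling the helper (which has internal linkage in json_reader.cpp).

```cpp
// Worked check of the UTF-8 ranges handled by codePointToUTF8() above.
//   U+0041  (1 byte)  -> 41
//   U+00E9  (2 bytes) -> C3 A9
//   U+20AC  (3 bytes) -> E2 82 AC      (derived below)
//   U+1F600 (4 bytes) -> F0 9F 98 80
#include <cstdio>

int main()
{
   unsigned cp = 0x20AC;                              // three-byte range: cp <= 0xFFFF
   unsigned char b0 = 0xE0 | ( (cp >> 12) & 0x0F );   // 0xE2
   unsigned char b1 = 0x80 | ( (cp >> 6)  & 0x3F );   // 0x82
   unsigned char b2 = 0x80 | (  cp        & 0x3F );   // 0xAC
   std::printf( "U+20AC -> %02X %02X %02X\n", b0, b1, b2 );
   return 0;
}
```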
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
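The parse() overloads and Features handling above can be exercised as in the hedged sketch below. It assumes the json/json.h umbrella header and the comment-collecting default of Reader::parse(); with Features::strictMode(), a leading comment is reported as a syntax error because readValue() has no case for comment tokens once comments are disallowed, and strictRoot_ additionally requires an array or object at the root.

```cpp
// Small sketch of the Reader entry points shown above: default (comment-friendly)
// parsing versus Features::strictMode().
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   const std::string doc = "// a comment before the root\n{ \"answer\" : 42 }";

   Json::Reader lenient;                 // Features::all(): comments allowed
   Json::Value root;
   if ( lenient.parse( doc, root ) )
      std::cout << "lenient parse ok, answer = " << root["answer"].asInt() << "\n";

   Json::Reader strict( Json::Features::strictMode() );
   Json::Value strictRoot;
   if ( !strict.parse( doc, strictRoot ) )
      std::cout << "strict mode rejects the leading comment:\n"
                << strict.getFormatedErrorMessages();
   return 0;
}
```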
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
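
The stream extraction operator defined above throws std::runtime_error carrying the getFormatedErrorMessages() text when parsing fails. A sketch of reading a file through it ("config.json" is an illustrative name, not from the patch):

#include <json/json.h>
#include <fstream>
#include <iostream>
#include <stdexcept>

int main()
{
   std::ifstream in( "config.json" );
   Json::Value root;
   try
   {
      in >> root;                     // runs Json::Reader::parse() internally
   }
   catch ( const std::runtime_error &e )
   {
      std::cerr << e.what();          // "* Line N, Column M" style messages
      return 1;
   }
   return 0;
}
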
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
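
The asXxx() conversions above go through JSON_ASSERT_MESSAGE when a value cannot be represented in the target type, while isConvertibleTo() answers the same question without side effects. A small sketch using only what this hunk shows:

#include <json/json.h>
#include <cassert>

int main()
{
   Json::Value v( 42 );                                 // intValue
   assert( v.asInt() == 42 );
   assert( v.asDouble() == 42.0 );
   assert( v.asBool() );                                // non-zero -> true
   assert( v.isConvertibleTo( Json::uintValue ) );      // 42 >= 0

   Json::Value neg( -1 );
   assert( !neg.isConvertibleTo( Json::uintValue ) );   // asUInt() would reject this
   return 0;
}
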
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
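
operator[], get(), isMember(), removeMember() and getMemberNames() together form the object interface implemented above; operator[] silently promotes a null value to an object, and append() promotes one to an array. A usage sketch with illustrative keys:

#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value root( Json::objectValue );
   root["name"] = "jsoncpp";                 // resolveReference() inserts the member
   root["tags"].append( "parser" );          // null member becomes an array, then grows

   if ( root.isMember( "name" ) )
      std::cout << root.get( "name", "unknown" ).asString() << "\n";

   Json::Value::Members keys = root.getMemberNames();
   for ( Json::Value::Members::const_iterator it = keys.begin(); it != keys.end(); ++it )
      std::cout << *it << "\n";

   root.removeMember( "tags" );              // returns the removed value (ignored here)
   return 0;
}
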
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid </ - // sequence. - // Should add a flag to allow this compatibility mode and prevent this - // sequence from occurring. - default: - if ( isControlCharacter( *c ) ) - { - std::ostringstream oss; - oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ?
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
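For reference, a minimal sketch of how the writers implemented in this file are normally used; the document content is illustrative:

#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value root;
   root["encoding"] = "UTF-8";
   root["indent"]["length"] = 3;

   Json::FastWriter fast;                    // compact, single line, terminated by '\n'
   std::cout << fast.write( root );

   Json::StyledWriter styled;                // indented, comment-preserving, returns a std::string
   std::cout << styled.write( root );

   Json::StyledStreamWriter stream( "   " ); // same layout, written directly to a std::ostream
   stream.write( std::cout, root );
   return 0;
}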
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 325b1d2..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index 9864178..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
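To make the fixture and assertion macros defined above concrete, a minimal hypothetical test program (DemoTest, checkSquare and the test name are illustrative, not part of the library; the real fixtures follow in main.cpp below):

#include "jsontest.h"

struct DemoTest : JsonTest::TestCase
{
   // Helper predicate: may contain its own assertions and is composed via JSONTEST_ASSERT_PRED,
   // which pushes a PredicateContext so nested failures are reported with their call stack.
   void checkSquare( int x, int expected )
   {
      JSONTEST_ASSERT_EQUAL( expected, x * x );
   }
};

JSONTEST_FIXTURE( DemoTest, squares )
{
   JSONTEST_ASSERT_EQUAL( 4, 2 * 2 );
   JSONTEST_ASSERT_PRED( checkSquare( 3, 9 ) );
}

int main( int argc, const char *argv[] )
{
   JsonTest::Runner runner;
   JSONTEST_REGISTER_FIXTURE( runner, DemoTest, squares );
   return runner.runCommandLine( argc, argv );
}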
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 84f56b6..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
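The .expected files deleted below are reference dumps consumed by the Python test driver: each line records one node of the parsed document as <path>=<value>, where "." denotes the root, "[i]" an array element and ".name" an object member. For a hypothetical input file containing { "a" : [ 1, "x" ] }, the matching dump would read:

.={}
.a=[]
.a[0]=1
.a[1]="x"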
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 459e0dd5dde7b6477b1eded5df21457b9c2c64b3 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 03:28:23 +0000 Subject: [PATCH 103/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@103 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 200 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + .../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + 
.../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + 
.../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 167 files changed, 13868 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 
tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 
100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. 
Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. 
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. 
+ +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. 
+ +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". 
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. 
If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. 
When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. 
+ +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command <command> <input-file>, where <command> is the value of +# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file +# provided by doxygen. 
Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. 
+ +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command <filter> <input-file>, where +# <filter> is the value of the INPUT_FILTER tag, and <input-file> is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. 
+# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. 
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. 
This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. 
+# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. 
+ +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. 
+ +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. 
+ +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. 
+ +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. 
+ +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. 
+ +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox new file mode 100644 index 0000000..fc7b530 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox @@ -0,0 +1,97 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of values, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space" : true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- rewrite JSON document preserving original comments + +\code +Json::Value root; // will contain the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and its location in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormatedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'plug-ins', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown, make the new configuration document: +// Since Json::Value has implicit constructors for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +The json-cpp library and this documentation are in the Public Domain. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org).
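Not part of the patch itself: a minimal strict-mode parsing sketch, assuming the Json::Features and Json::Reader declarations that appear in include/json/features.h and include/json/reader.h later in this patch, and assuming the headers are reachable as <json/json.h>.

#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   // Strict mode: comments are forbidden and the root must be an array or an object.
   Json::Reader reader( Json::Features::strictMode() );
   Json::Value root;
   const std::string doc = "{ \"encoding\" : \"UTF-8\" }";
   if ( !reader.parse( doc, root, false ) )   // false: do not collect comments
   {
      std::cerr << reader.getFormatedErrorMessages();
      return 1;
   }
   std::cout << root.get( "encoding", "UTF-8" ).asString() << std::endl;
   return 0;
}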
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox new file mode 100644 index 0000000..7f3aa1a --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox @@ -0,0 +1,32 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + - Release on sourceforge download + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - "Stream" based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py new file mode 100644 index 0000000..0c48763 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doxybuild.py @@ -0,0 +1,200 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. 
+ """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + tar.close() + + +def find_program(filename): + """find a program in folders path_lst, and sets env[var] + @param env: environmentA + @param filename: name of the program to search for + @param path_list: list of directories to search for filename + @param var: environment value to be checked for in env or os.environ + @return: either the value that is referenced with [var] in env or os.environ + or the first occurrence filename or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + try: + subprocess.check_call( cmd ) + except subprocess.CalledProcessError: + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc' ) + print open(os.path.join('doc', warning_log_path), 'rb').read() + if not ok: + print 'Doxygen generation failed' + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
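 * A short illustrative sketch (added; 'settings' is an assumed objectValue and is not
 * part of the original header) of iterating members with getMemberNames():
 * \code
 * Json::Value::Members names = settings.getMemberNames();
 * for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
 *    std::cout << *it << " : " << settings[ *it ] << std::endl;
 * \endcode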
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
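 * Worked illustration (added note, not in the original comment): with itemPerLink == 6,
 * a bucket holding 8 values spans two links; the first link is full and the last link
 * holds the remaining 2 values plus 4 'available' slots.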
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
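      // Added note (illustrative, not in the original header): per the look-up rule in the
      // class comment above, item 21 lives at pages_[21 / itemsPerPage][21 % itemsPerPage],
      // i.e. pages_[2][5]; keeping itemsPerPage a power of two lets the divide and modulo
      // reduce to a shift and a mask.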
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
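+ *
+ * A minimal usage sketch (illustrative only):
+ * \code
+ * Json::Value root;
+ * root["name"] = "example";
+ * Json::StyledStreamWriter writer( "   " );
+ * writer.write( std::cout, root );
+ * \endcode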
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..1aa5978 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. 
+ """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
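+ // The first bytes of each released object double as the free list's
+ // "next" pointer (written by release()), so reuse needs no extra memory.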
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
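+ // Construct-on-first-use: the call above forces the function-local statics
+ // of arrayAllocator() to be built while this dummy object is initialized.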
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
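+ // Run each element's destructor explicitly; the pages holding them are
+ // handed back to the allocator in the loop below.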
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
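 + * (DefaultValueMapAllocator::allocateMapLink() below relies on this and
 + * zero-fills a freshly allocated link instead of running this constructor.)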
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
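+ // Scan the slots of this link: the first available slot means the key is
+ // not present in the bucket, so a new item is created in place.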
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
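+// Typical usage (illustrative sketch; `document` stands for any std::string
+// holding the JSON text to parse):
+//   Json::Reader reader;
+//   Json::Value root;
+//   if ( !reader.parse( document, root ) )
+//       std::cout << reader.getFormatedErrorMessages();
+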
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
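+// The aggregate types below are stored either in the ObjectValues map keyed
+// by CZString or, when JSON_VALUE_USE_INTERNAL_MAP is defined, in the custom
+// ValueInternalArray/ValueInternalMap containers included here.
+// A minimal usage sketch of the resulting Value API (illustrative only;
+// every member used here is defined further down in this file):
+//
+//    Json::Value config( Json::objectValue );
+//    config["encoding"] = "UTF-8";
+//    config["plug-ins"].append( "python" );
+//    int indent = config.get( "indent", 3 ).asInt();
+//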
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
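+   // Grows or shrinks the array to exactly newSize elements. With the
+   // default ObjectValues backend, growing touches index newSize-1 so that
+   // size() (highest index + 1) becomes newSize, and shrinking erases the
+   // trailing indices; with JSON_VALUE_USE_INTERNAL_MAP the work is
+   // delegated to ValueInternalArray::resize().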
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
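   // Typical usage of the writers defined in this file (a sketch only;
   // `value` stands for any Json::Value and std::cout for any std::ostream):
   //
   //   Json::FastWriter fast;
   //   std::string compact = fast.write( value );   // one line, '\n' terminated
   //
   //   Json::StyledWriter styled;
   //   std::string pretty = styled.write( value );  // human readable string
   //
   //   Json::StyledStreamWriter streamWriter( "   " );
   //   streamWriter.write( std::cout, value );      // same styling, sent to a stream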
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
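// Note: these dialog-prevention hooks are only installed when the test
// executable is run with --test-auto (see Runner::runCommandLine and
// preventDialogOnCrash further below). Illustrative invocations, assuming
// the binary built from the 'test_lib_json' scons target:
//
//   test_lib_json --list-tests
//   test_lib_json --test ValueTest/isObject
//   test_lib_json --test-auto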
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..325b1d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..9864178 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
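// A minimal sketch of the behaviour the two TODO items above refer to, as
// exercised by the ValueTest::isBool and ValueTest::isUInt fixtures below
// (current behaviour, not the intended final semantics):
//
//   Json::Value b( true );
//   b.isBool();      // true
//   b.isIntegral();  // currently also true -- the TODO argues it should not be
//
//   Json::Value u( 10u );
//   u.isUInt();      // true
//   u.isInt();       // currently false, even though the value fits in an int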
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..84f56b6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '<File "%s" is missing: %s>' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail)
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 567f9338d38f4aba8f85d9e1ef35e1b6de268c8d Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 07:41:58 +0000 Subject: [PATCH 104/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@104 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 200 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - 
.../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - 
.../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 167 files changed, 13868 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 
100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 
100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. 
Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. 
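To make the TESTNAME.expected format described in the README above concrete, here is a minimal Python 3 sketch of the path=value flattening it describes. It is illustrative only: the flatten() helper and the sample document are hypothetical and are not part of this patch; the actual runners in this tag are the jsontest executable and the Python 2 scripts pyjsontestrunner.py / runjsontests.py shown earlier.

    # Minimal sketch of the TESTNAME.expected flattening described above.
    # Assumes Python 3; member ordering and number formatting here only
    # approximate what the real jsontest executable emits.
    import json

    def flatten(value, path='.', out=None):
        # One "path=value" line per element; '.' is the root, object members
        # are separated by '.', and [N] selects the array element at index N.
        if out is None:
            out = []
        if isinstance(value, dict):
            out.append(path + '={}')
            suffix = '' if path.endswith('.') else '.'
            for name in sorted(value):
                flatten(value[name], path + suffix + name, out)
        elif isinstance(value, list):
            out.append(path + '=[]')
            for index, child in enumerate(value):
                flatten(child, '%s[%d]' % (path, index), out)
        elif isinstance(value, bool):
            out.append('%s=%s' % (path, 'true' if value else 'false'))
        elif value is None:
            out.append(path + '=null')
        elif isinstance(value, str):
            out.append('%s="%s"' % (path, value))
        elif isinstance(value, int):
            out.append('%s=%d' % (path, value))
        else:
            out.append('%s=%.16g' % (path, value))
        return out

    doc = json.loads('{"name": "value", "list": [1, 2]}')
    print('\n'.join(flatten(doc)))
    # Prints the flattened tree a matching .expected file would contain:
    # .={}
    # .list=[]
    # .list[0]=1
    # .list[1]=2
    # .name="value"
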
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. 
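The LIB_NAME_SUFFIX machinery set up in the deleted SConstruct above ('${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}', with the leading 'ms' stripped from msvc platform names) determines the names under which buildLibrary() installs the static library, and the comment notes that it must match the autolink naming convention. A small illustration, in plain Python and not taken from the build script, of how those pieces combine for the default msvc71 platform:

    # Illustration only: reproduces the naming logic of the deleted SConstruct
    # for a single platform value.
    platform = 'msvc71'
    short_platform = platform[2:] if platform.startswith('msvc') else platform   # 'vc71'
    lib_link_type = 'lib'    # static library
    lib_cruntime = 'mt'      # multi-threaded C runtime
    lib_name_suffix = '%s_%s%s' % (short_platform, lib_link_type, lib_cruntime)  # 'vc71_libmt'
    print('json_' + lib_name_suffix)   # library target name: json_vc71_libmt

A typical invocation of the script would then be something like 'scons platform=msvc71' to build the library, or 'scons platform=linux-gcc check' to also run the JSON test suite through the RunJSONTests action defined above.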
- -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. 
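The deleted doc/doxyfile.in is a template rather than a ready-to-use doxyfile: the .in suffix and %NAME% tokens such as PROJECT_NUMBER = %JSONCPP_VERSION% and OUTPUT_DIRECTORY = %DOC_TOPDIR% above (with %TOPDIR%, %WARNING_LOG_PATH%, %HTML_OUTPUT%, %HAVE_DOT% and %DOT_PATH% further down) are placeholders that presumably get substituted by the documentation build step before doxygen is invoked. A minimal sketch of such a substitution pass follows; the helper name, the output file name and the values are illustrative assumptions, not the project's actual code.

    # Hypothetical substitution pass: expands %NAME% tokens of doxyfile.in into a
    # concrete configuration file; unknown tokens are left untouched.
    import re

    def expand_doxyfile_template(text, values):
        return re.sub(r'%(\w+)%', lambda m: values.get(m.group(1), m.group(0)), text)

    template = open('doc/doxyfile.in', 'rt').read()
    concrete = expand_doxyfile_template(template, {
        'JSONCPP_VERSION': '0.5.0',
        'DOC_TOPDIR':      'dist/doxygen',
        'TOPDIR':          '..',
        'HTML_OUTPUT':     'html',
        'HAVE_DOT':        'NO',
    })
    open('doc/doxyfile.generated', 'wt').write(concrete)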
- -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". 
-# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. 
If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. 
When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. 
- -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. 
Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. 
- -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. 
-# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. 
-# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. 
This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. 
-# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. 
- -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. 
- -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. 
- -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. 
- -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. 
- -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. 
- -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
-[footer.html body: HTML markup lost in extraction; the deleted file contained the SourceForge logo link, the text "hosts this site.", and a "Send comments to: Json-cpp Developers" mail link]
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@
-[header.html body: HTML markup lost in extraction; the deleted file carried the page title "JsonCpp - JSON data format manipulation library" and the navigation links "JsonCpp project page" and "JsonCpp home page"]
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
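Editor's aside (not part of the original patch): the \mainpage above illustrates StyledWriter output; the same 0.5.0 tree also declares a FastWriter for compact, single-line output (see writer.h later in this patch). A minimal sketch, assuming only the Reader and FastWriter interfaces shown in this patch; the document string and member names below are illustrative only:

\code
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   const std::string doc = "{ \"encoding\" : \"UTF-8\", \"indent\" : { \"length\" : 3 } }";
   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( doc, root ) )                      // parse from a UTF-8 string
   {
      std::cerr << reader.getFormatedErrorMessages();     // spelling as declared in reader.h
      return 1;
   }
   Json::FastWriter writer;
   std::cout << writer.write( root );                     // compact, single-line JSON
   return 0;
}
\endcode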
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 0c48763..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,200 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. 
- """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - tar.close() - - -def find_program(filename): - """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - try: - subprocess.check_call( cmd ) - except subprocess.CalledProcessError: - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - - version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc' ) - print open(os.path.join('doc', warning_log_path), 'rb').read() - if not ok: - print 'Doxygen generation failed' - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
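- As a quick orientation for the three writer classes in this header, a minimal sketch, assuming a populated Json::Value named root (the variable names are illustrative and not part of the original header):
- \code
- #include <iostream>
- #include "json/json.h"
-
- void demoWriters()
- {
-    Json::Value root;
-    root["encoding"] = "UTF-8";
-    root["indent"]["length"] = 3;
-
-    Json::FastWriter fast;                      // compact, single line
-    std::string compact = fast.write( root );
-
-    Json::StyledWriter styled;                  // indented, human friendly
-    std::string pretty = styled.write( root );
-
-    Json::StyledStreamWriter stream( "   " );   // same layout, written to a stream
-    stream.write( std::cout, root );
-
-    std::cout << root;                          // operator<<() below uses StyledStreamWriter
- }
- \endcode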
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 1aa5978..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. 
- """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 325b1d2..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index 9864178..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 84f56b6..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* <!-- --", - "# -- --> */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "&#34; \u0022 %22 0x22 034 &#34;", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ?
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 9cd4a05db091a6565ee2ff28470365e46c31996a Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 07:43:10 +0000 Subject: [PATCH 105/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@105 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 208 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + .../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + 
.../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + 
.../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 167 files changed, 13876 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 
tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 
100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. 
Scons requires +python to be installed (http://www.python.org). + +You can download the scons-local distribution from the following URL: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Adding a platform is fairly simple. You need to change the SConstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file that contains the input document in JSON format. +- a TESTNAME.expected file that contains a flattened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represents the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element paths. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated alongside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was correct. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWriter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing errors.
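As an illustration of the flattened .expected format that the README above describes (using a hypothetical test named test_hello, which is not part of the 0.5.0 test suite), an input file test/data/test_hello.json containing:

{ "name" : "hello", "tags" : [ "a", "b" ] }

would be paired with a test_hello.expected file that writes one path=value line per element, with object members listed in sorted order and array/object values shown as empty:

.={}
.name="hello"
.tags=[]
.tags[0]="a"
.tags[1]="b"

This mirrors what valueTreeToString in the pyjsontestrunner.py script shown earlier produces for each value type.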
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. 
+ +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. 
+ +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". 
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. 
If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. 
When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. 
+ +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. 
Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. 
+ +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. 
+# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. 
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. 
This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. 
+# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. 
+ +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. 
+ +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. 
+ +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. 
+ +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. 
+ +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. 
+ +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation) is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite a JSON document while preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout  << "Failed to parse configuration\n"
+               << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, make the new configuration document:
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to explicitly construct the Json::Value object.
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
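Usage note (not part of the patch itself): the documentation referred to in readme.txt is produced by the doxybuild.py script added below. An illustrative invocation, assuming doxygen (and optionally Graphviz dot) is installed and the command is run from the project top directory, would be: python doxybuild.py --with-dot --open --tarball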
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+ \section ms_release Make JsonCpp ready for release
+ - Build system clean-up:
+   - Fix build on Windows (shared-library build is broken)
+   - Add enable/disable flag for static and shared library build
+   - Enhance help
+ - Platform portability check: (Notes: was ok on last check)
+   - linux/gcc,
+   - solaris/cc,
+   - windows/msvc678,
+   - aix/vacpp
+ - Add JsonCpp version to header as a numeric value for use in preprocessor tests
+ - Remove buggy experimental hash stuff
+ - Release on sourceforge download
+ \section ms_strict Add a strict mode to reader/parser
+ Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+ - Enforce only object or array as root element
+ - Disable comment support
+ - Get jsonchecker failing tests to pass in strict mode
+ \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value.
+ Some typical use-cases involve converting an application-specific structure to/from a JSON document.
+ - Event-based parser to allow deserializing a JSON document directly into a data structure instead of
+   using the intermediate Json::Value.
+ - "Stream"-based parser to serialize a JSON document without using Json::Value as input.
+ - Performance-oriented parser/writer:
+   - Provide an event-based parser. Should allow pulling & skipping events for ease of use.
+   - Provide a JSON document builder: fast only.
+ \section ms_perfo Performance tuning
+ - Provide support for static property name definition, avoiding allocation
+ - A static property dictionary can be provided to the JSON reader
+ - Performance scenarios & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..fe36556
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,208 @@
+"""Script to generate doxygen documentation.
+"""
+
+import re
+import os
+import os.path
+import sys
+import shutil
+import gzip
+import tarfile
+
+TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
+
+def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
+    """Parameters:
+    tarball_path: output path of the .tar.gz file
+    sources: list of sources to include in the tarball, relative to the current directory
+    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
+        from its path in the tarball.
+    prefix_dir: all files stored in the tarball will be placed under the sub-directory prefix_dir. Set to ''
+        to make them children of the root.
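+
+    Illustrative note (not in the original script): build_doc() below invokes this as
+        make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )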
+ """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + tar.close() + + +def find_program(filename): + """find a program in folders path_lst, and sets env[var] + @param env: environmentA + @param filename: name of the program to search for + @param path_list: list of directories to search for filename + @param var: environment value to be checked for in env or os.environ + @return: either the value that is referenced with [var] in env or os.environ + or the first occurrence filename or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + print open(os.path.join('doc', warning_log_path), 'rb').read() + if not ok: + print 'Doxygen generation failed' + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
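+   *
+   * Illustrative sketch (not part of the original header) of the accessors described above:
+   * \code
+   * Json::Value root;                              // nullValue
+   * root["name"] = "json-cpp";                     // root becomes an objectValue
+   * root["list"][2u] = 7;                          // "list" becomes an arrayValue of size 3;
+   *                                                // elements 0 and 1 are initialized to null.
+   * int port = root.get( "port", 8080 ).asInt();   // no "port" member: the default is returned.
+   * Json::Value::Members members = root.getMemberNames();   // [ "list", "name" ]
+   * \endcode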
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
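+ *
+ * Illustrative look-up sketch (not the actual implementation), following the bucket and
+ * link structure described above:
+ * \code
+ * BucketIndex bucketIndex = hash( key ) % bucketsSize_;
+ * for ( const ValueInternalLink *link = &buckets_[bucketIndex]; link != 0; link = link->next_ )
+ *    for ( BucketIndex index = 0; index < ValueInternalLink::itemPerLink; ++index )
+ *       if ( link->keys_[index]  &&  strcmp( link->keys_[index], key ) == 0 )
+ *          return &link->items_[index];
+ * return 0;   // not found
+ * \endcode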
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
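+      // Illustrative only (not part of the original header): the element look-up described in the
+      // class comment above, e.g. for itemIndex 19 with itemsPerPage == 8:
+      //    PageIndex pageIndex = itemIndex / itemsPerPage;             // 19 / 8 == 2
+      //    Value &item = pages_[pageIndex][itemIndex % itemsPerPage];  // 19 % 8 == 3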
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
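[Editorial sketch, not part of the original patch.] A minimal usage example for the two styled writers declared in this header, assuming a Json::Value named root has already been parsed or built:

    #include <json/json.h>
    #include <fstream>
    #include <iostream>

    void dumpBothWays( const Json::Value &root )
    {
       // String-based writer: returns the whole document as a std::string.
       Json::StyledWriter stringWriter;
       std::cout << stringWriter.write( root );

       // Stream-based writer: writes directly to any std::ostream.
       std::ofstream out( "out.json" );
       Json::StyledStreamWriter streamWriter( "  " );  // two-space indentation
       streamWriter.write( out, root );
    }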
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..1aa5978 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. 
+ """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
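      // (Editorial comment, not part of the original patch.) The free list is
      // intrusive: release() writes the previous head into the first
      // sizeof(AllocatedType *) bytes of the released object, so the branch
      // below pops the head with a single pointer read. This is also why the
      // constructor asserts that one allocation is at least pointer-sized.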
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
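    // (Editorial comment, not part of the original patch.) Every element was
    // constructed with placement new on a raw page, so the destructor calls
    // each ~Value() explicitly before the pages and the page index are handed
    // back to the allocator.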
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
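      // (Editorial comment, not part of the original patch.) Probe order inside
      // a bucket: each link holds ValueInternalLink::itemPerLink slots; the
      // first slot flagged 'available' ends the probe and a new item is created
      // there, while a key match returns the existing item instead.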
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
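For orientation before the implementation, here is a minimal, hypothetical usage sketch of the Reader interface defined in this patch. It assumes the library's json/json.h umbrella header and exercises only calls that appear in the code below: parse() with comment collection explicitly disabled, getFormatedErrorMessages() on failure, and element access through Value::operator[].

#include <json/json.h>   // assumed umbrella header pulling in Reader and Value
#include <iostream>
#include <string>

int main()
{
   const std::string doc = "{ \"name\": \"jsoncpp\", \"version\": 0.5 }";
   Json::Value root;
   Json::Reader reader;                       // default Features::all()
   if ( !reader.parse( doc, root, false ) )   // false: do not collect comments
   {
      std::cerr << reader.getFormatedErrorMessages();
      return 1;
   }
   std::cout << root["name"].asString() << " "
             << root["version"].asDouble() << std::endl;
   return 0;
}

On a syntax error the formatted message lists one entry per recorded error, with the line and column computed by getLocationLineAndColumn() as implemented further down in this file.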
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
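// Editor's note (not part of the original patch): the storage back end for
// arrays and objects is chosen at compile time.  Without
// JSON_VALUE_USE_INTERNAL_MAP, both are kept in the ObjectValues map keyed
// by CZString (value_.map_); with it, arrays use ValueInternalArray and
// objects use the page-based ValueInternalMap shown earlier in this patch,
// pulled in through the .inl includes just below.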
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
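// Editor's note (not part of the original patch): resize() applies only to
// null and array values; a null value is first promoted to an empty array.
// In the default (map-backed) representation below, growing merely touches
// index newSize-1 through operator[], which default-constructs the missing
// entries as null, while shrinking erases each index from newSize up to the
// old size.  With JSON_VALUE_USE_INTERNAL_MAP the work is delegated to
// ValueInternalArray::resize().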
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
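// Editor's note (not part of the original patch): as with the error
// branches above, this one is a stub; resolution keeps going, so the
// reference returned by this overload may end up aliasing Value::null.
// The resolve() overload taking a defaultValue, defined next, is the
// variant that signals such misses to the caller.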
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid </ + // sequence. + // Should add a flag to allow this compatibility mode and prevent this + // sequence from occurring. + default: + if ( isControlCharacter( *c ) ) + { + std::ostringstream oss; + oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ?
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
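+ // Usage sketch (illustrative assumption): a typical caller drives this writer directly, e.g. + // Json::Value root; root["encoding"] = "UTF-8"; + // Json::StyledStreamWriter writer( "   " ); + // writer.write( std::cout, root ); + // or relies on the operator<<( std::ostream &, const Value & ) overload defined at the end of this file. + // The indentation string passed to the constructor is appended once per nesting level by indent().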
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..325b1d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..9864178 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
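+// Usage sketch (hypothetical test, not part of this suite; it assumes only the +// JSONTEST_FIXTURE and JSONTEST_REGISTER_FIXTURE macros defined in jsontest.h): +// a new check is added by declaring a fixture test case and registering its factory in main(): +// JSONTEST_FIXTURE( ValueTest, isNumericExample ) +// { +// JSONTEST_ASSERT_EQUAL( true, real_.isNumeric() ); +// } +// ... +// JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNumericExample );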
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..84f56b6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 4e26e5384865b84790e75433f549487256cfd872 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 07:49:53 +0000 Subject: [PATCH 106/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@106 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 208 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - 
.../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - 
.../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 167 files changed, 13876 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 
100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 
100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. 
Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. 
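For reference, the flattened .expected format that the README above describes (and that pyjsontestrunner.py in the same tree implements for Python 2) can be sketched in a few lines of Python 3. This is only an illustration of the path=value layout, not part of the original patch; the flatten() helper and the sample document below are hypothetical.

import json

def flatten(value, path='.', out=None):
    # One line per node: "<path>=<value>". '.' is the root, object members
    # are joined with '.', and array elements use the "[N]" index notation.
    # Object and array values themselves are written as {} and [].
    if out is None:
        out = []
    if isinstance(value, dict):
        out.append('%s={}' % path)
        suffix = '' if path.endswith('.') else '.'
        for name in sorted(value):          # sorted for deterministic output
            flatten(value[name], path + suffix + name, out)
    elif isinstance(value, list):
        out.append('%s=[]' % path)
        for index, child in enumerate(value):
            flatten(child, '%s[%d]' % (path, index), out)
    elif isinstance(value, bool):           # must come before the int check
        out.append('%s=%s' % (path, 'true' if value else 'false'))
    elif value is None:
        out.append('%s=null' % path)
    elif isinstance(value, str):
        out.append('%s="%s"' % (path, value))
    elif isinstance(value, int):
        out.append('%s=%d' % (path, value))
    else:                                   # float
        out.append('%s=%.16g' % (path, value))
    return out

print('\n'.join(flatten(json.loads('{ "count" : 1234 }'))))
# Prints, matching test_object_02.expected:
# .={}
# .count=1234

Sorting the member names keeps the flattened output deterministic, which is what makes a plain text comparison against the checked-in .expected files meaningful.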
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. 
- -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. 
- -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". 
-# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. 
If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. 
When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. 
- -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. 
Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. 
- -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. 
-# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. 
-# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. 
This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. 
-# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. 
- -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. 
- -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. 
- -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. 
- -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. 
- -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. 
- -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
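The \mainpage walkthrough above refers to a config_doc variable and helper functions that are not defined in the snippet. A self-contained sketch of the same read/modify/write cycle, using only the API shown in this patch, is given below; the inline JSON literal is made up for illustration, and the headers are assumed to be reachable as <json/json.h>.

\code
#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   const std::string config_doc =
      "// Configuration options\n"
      "{ \"encoding\" : \"UTF-8\", \"indent\" : { \"length\" : 3 } }";

   Json::Value root;   // will contain the root value after parsing.
   Json::Reader reader;
   if ( !reader.parse( config_doc, root ) )
   {
      std::cout << "Failed to parse configuration\n"
                << reader.getFormatedErrorMessages();
      return 1;
   }

   root["indent"]["use_space"] = true;              // modify the document

   Json::StyledWriter writer;
   std::cout << writer.write( root ) << std::endl;  // comments are preserved
   return 0;
}
\endcode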
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index fe36556..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. 
- """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - tar.close() - - -def find_program(filename): - """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - print open(os.path.join('doc', warning_log_path), 'rb').read() - if not ok: - print 'Doxygen generation failed' - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
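reader.h above also declares a constructor taking a Features object (declared earlier in features.h). A hedged sketch of parsing with the strict feature set follows; the document literal and the "answer" member are invented for the example.

\code
#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   const std::string doc = "{ \"answer\" : 42 }";

   // Strict mode: comments forbidden, root must be an array or an object.
   Json::Reader reader( Json::Features::strictMode() );
   Json::Value root;
   if ( !reader.parse( doc, root ) )
   {
      std::cout << reader.getFormatedErrorMessages();
      return 1;
   }
   std::cout << root["answer"].asInt() << std::endl;   // 42
   return 0;
}
\endcode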
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
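A minimal sketch exercising the Value interface described above (operator[], append(), get() with a default value) is shown below; it is not part of the patch, and the member names simply mirror the configuration example from doc/jsoncpp.dox.

\code
#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   Json::Value config( Json::objectValue );   // {}
   config["encoding"] = "UTF-8";              // implicit Value( const char * )
   config["indent"]["length"] = 3;            // nested members created on demand

   Json::Value plugins( Json::arrayValue );   // []
   plugins.append( "python" );
   plugins.append( "c++" );
   config["plug-ins"] = plugins;

   // get() returns the member if present, otherwise the supplied default.
   std::string encoding = config.get( "encoding", "UTF-8" ).asString();
   int length = config["indent"].get( "length", 3 ).asInt();

   std::cout << encoding << " " << length << " "
             << config["plug-ins"].size() << std::endl;   // UTF-8 3 2
   return 0;
}
\endcode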
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
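The comment support referred to above (the CommentPlacement enum together with setComment() and toStyledString(), declared later in this header) can be exercised with a short sketch; the member name and comment text are illustrative only.

\code
#include <iostream>
#include <json/json.h>

int main()
{
   Json::Value root;
   root["encoding"] = "UTF-8";
   // Comments must be "//..." or "/* ... */" per the setComment() documentation.
   root["encoding"].setComment( "// Default encoding for text", Json::commentBefore );
   std::cout << root.toStyledString() << std::endl;
   return 0;
}
\endcode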
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
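As a concrete instance of the look-up rule quoted in the class comment above, and assuming itemsPerPage == 8 as declared here, a hypothetical item index splits into a page index and an offset like this:
\code
Json::Value::ArrayIndex itemIndex = 19;
Json::Value::ArrayIndex pageIndex = itemIndex / 8;   // = 2
Json::Value::ArrayIndex offset    = itemIndex % 8;   // = 3
// The element is then found at pages_[pageIndex][offset], i.e. pages_[2][3].
\endcode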
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
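User code normally reaches these iterators through Value rather than constructing them directly; below is a small sketch of a helper that lists an object's member names, assuming the begin()/end() members and const_iterator typedef that Value exposes. The helper name is made up for illustration.
\code
#include <cstdio>
#include <json/value.h>   // header layout of this source tree

// Hypothetical helper: prints every member name of an objectValue.
void printMemberNames( const Json::Value &root )
{
   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
      std::printf( "%s\n", it.memberName() );   // "" when root is not an objectValue
}
\endcode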
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
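The two string-producing writers declared above differ only in the formatting of their output; a minimal usage sketch, with hypothetical document contents:
\code
Json::Value root;
root["name"] = "json";
root["size"] = 2;

Json::FastWriter fast;                     // single line, machine oriented
std::string compact = fast.write( root );  // e.g. {"name":"json","size":2}

Json::StyledWriter styled;                 // indented, human oriented
std::string pretty = styled.write( root );
\endcode
The StyledStreamWriter being declared here, together with the operator<<( std::ostream&, const Value& ) overload further down in this header, writes the same styled output directly to a std::ostream instead of returning a string.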
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 1aa5978..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. 
- """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
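The codePointToUTF8() helper above follows the standard UTF-8 layout: one byte up to U+007F, two up to U+07FF, three up to U+FFFF, and four up to U+10FFFF. A minimal stand-alone sketch of that same mapping (encodeUtf8 is a hypothetical name used only for illustration, not part of the library or of this patch):

#include <cassert>
#include <string>

// Illustrative restatement of the UTF-8 layout used by codePointToUTF8().
static std::string encodeUtf8( unsigned int cp )
{
   std::string out;
   if ( cp <= 0x7F )                    // 0xxxxxxx
      out += static_cast<char>( cp );
   else if ( cp <= 0x7FF )              // 110xxxxx 10xxxxxx
   {
      out += static_cast<char>( 0xC0 | (cp >> 6) );
      out += static_cast<char>( 0x80 | (cp & 0x3F) );
   }
   else if ( cp <= 0xFFFF )             // 1110xxxx 10xxxxxx 10xxxxxx
   {
      out += static_cast<char>( 0xE0 | (cp >> 12) );
      out += static_cast<char>( 0x80 | ((cp >> 6) & 0x3F) );
      out += static_cast<char>( 0x80 | (cp & 0x3F) );
   }
   else if ( cp <= 0x10FFFF )           // 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
   {
      out += static_cast<char>( 0xF0 | (cp >> 18) );
      out += static_cast<char>( 0x80 | ((cp >> 12) & 0x3F) );
      out += static_cast<char>( 0x80 | ((cp >> 6) & 0x3F) );
      out += static_cast<char>( 0x80 | (cp & 0x3F) );
   }
   return out;                          // code points above U+10FFFF yield an empty string
}

int main()
{
   // U+20AC (euro sign) encodes as the byte sequence E2 82 AC.
   assert( encodeUtf8( 0x20AC ) == "\xE2\x82\xAC" );
   return 0;
}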
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
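Note that isArray() and isObject() intentionally also return true for a null value, since a null can still be promoted to either container by operator[], while the remaining predicates map one-to-one onto the ValueType. The comment accessors store one string per CommentPlacement slot. A small sketch (the comment text is illustrative):

#include <json/json.h>

void predicateSketch()
{
   Json::Value v;                                        // nullValue
   bool nullLike = v.isNull() && v.isArray() && v.isObject();   // all true for null

   Json::Value n( 42 );
   bool integral = n.isInt() && n.isIntegral() && n.isNumeric();
   bool real     = n.isDouble();                         // false; realValue is a distinct type

   n.setComment( "// the answer", Json::commentBefore );
   if ( n.hasComment( Json::commentBefore ) )
      std::string text = n.getComment( Json::commentBefore );

   std::string pretty = n.toStyledString();              // StyledWriter output, ends with '\n'
}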
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
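makePath() above implements a small path grammar: '.' separates object member names, a bracketed number is an array index, and a bare '%' consumes the next PathArgument passed to the constructor as a key (a '%' inside brackets is meant to consume an index argument). A hedged sketch of the intended use, with an invented document shape:

#include <json/json.h>

void pathSketch()
{
   Json::Value root;
   root["book"]["pages"][0u] = "intro";

   Json::Path fixed( ".book.pages[0]" );
   const Json::Value &page = fixed.resolve( root );      // "intro"

   Json::Path keyed( ".book.%", "pages" );                // bare '%' bound to the extra argument as a key
   const Json::Value &pages = keyed.resolve( root );      // the whole "pages" array
}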
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
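Path::make() walks the same arguments but goes through the non-const operator[], so it can build the intermediate objects of a skeleton before assignment. Note that both resolve() overloads guard array steps with `!node->isArray() || node->isValidIndex( arg.index_ )`; the second test appears inverted (it trips precisely when the index is valid), so the defaulted overload in the sketch below is shown only for the object-member case. Names are illustrative:

#include <json/json.h>

void pathMakeSketch()
{
   Json::Value root( Json::objectValue );
   Json::Path path( ".server.host" );
   path.make( root ) = "localhost";                       // creates root["server"]["host"] and assigns it

   Json::Value port = Json::Path( ".server.port" ).resolve( root, 8080 );   // member absent: default returned
}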
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
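On top of ValueIteratorBase, the public iterators expose key(), index() and memberName() for the current element in addition to dereferencing to the mapped Value. A sketch of walking either an object or an array (assuming the default std::map-backed storage):

#include <json/json.h>
#include <iostream>

void iterationSketch( const Json::Value &root )
{
   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
   {
      if ( root.isObject() )
         std::cout << it.memberName() << " = " << *it;    // member name for object children
      else
         std::cout << it.index() << " = " << *it;         // numeric index for array children
   }
}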
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
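valueToString() and valueToQuotedString() are part of the public writer interface, so they can also be called directly when hand-building output; the double overload prints with "%#.16g" and then trims redundant trailing zeros. A short sketch of the conversions:

#include <json/json.h>
#include <string>

void conversionSketch()
{
   std::string a = Json::valueToString( -42 );                     // "-42"
   std::string b = Json::valueToString( 1.5 );                     // trailing zeros trimmed, one kept: "1.50"
   std::string c = Json::valueToString( true );                    // "true"
   std::string d = Json::valueToQuotedString( "line\nbreak" );     // quoted and escaped: "line\nbreak"
}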
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
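FastWriter produces the most compact form: no indentation, no comments, a single trailing newline; enableYAMLCompatibility() merely switches the key separator from ":" to ": ". A usage sketch:

#include <json/json.h>
#include <iostream>

void fastWriterSketch( const Json::Value &root )
{
   Json::FastWriter writer;
   writer.enableYAMLCompatibility();         // optional: emit ": " instead of ":" after keys
   std::cout << writer.write( root );        // one line, terminated by '\n'
}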
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
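StyledStreamWriter mirrors StyledWriter but writes directly to a std::ostream and takes the indentation string as a constructor argument instead of a fixed indent size. A sketch writing to a file (the file name is illustrative):

#include <json/json.h>
#include <fstream>

void styledStreamSketch( const Json::Value &root )
{
   std::ofstream out( "config.json" );
   Json::StyledStreamWriter writer( "  " );   // two-space indentation
   writer.write( out, root );
}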
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
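The stream insertion operator defined above makes StyledStreamWriter the default formatting for std::ostream, so a Value can be dumped with plain operator<<. A sketch:

#include <json/json.h>
#include <iostream>

void streamSketch( const Json::Value &root )
{
   std::cout << root;      // equivalent to Json::StyledStreamWriter().write( std::cout, root )
}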
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 325b1d2..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index 9864178..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
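The fixture macros expand to a factory-bearing subclass of the fixture type, so adding a test means deriving a struct from JsonTest::TestCase (or another fixture), writing a JSONTEST_FIXTURE body, and registering it with a Runner, mirroring the pattern used in main.cpp below. As shown above, JSONTEST_ASSERT tests a name `condition` rather than its `expr` parameter, so this sketch sticks to JSONTEST_ASSERT_EQUAL; the fixture and test names are invented:

#include "jsontest.h"
#include <json/json.h>

struct WriterTest : JsonTest::TestCase
{
};

JSONTEST_FIXTURE( WriterTest, compactNull )
{
   Json::FastWriter writer;
   std::string doc = writer.write( Json::Value() );
   JSONTEST_ASSERT_EQUAL( std::string( "null\n" ), doc );
}

int main( int argc, const char *argv[] )
{
   JsonTest::Runner runner;
   JSONTEST_REGISTER_FIXTURE( runner, WriterTest, compactNull );
   return runner.runCommandLine( argc, argv );
}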
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 84f56b6..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
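The test data removed below comes in pairs: each *.json input has a *.expected file recording the parsed tree in a flat path=value form, where '.' denotes the document root, '[i]' indexes an array element and '.name' selects an object member, so the runner's own dump can be compared against the expected text line by line. A minimal, illustrative walker producing that style of dump (the function name and numeric formatting are assumptions, not the project's actual jsontestrunner code) could look like:

#include <json/json.h>
#include <cstdio>
#include <string>

// Illustrative sketch: dump a Json::Value as the path=value lines used by the
// *.expected files ('.' = root, '[i]' = array element, '.name' = object member).
static void printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
{
   switch ( value.type() )
   {
   case Json::nullValue:    fprintf( fout, "%s=null\n", path.c_str() ); break;
   case Json::intValue:     fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); break;
   case Json::uintValue:    fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); break;
   case Json::realValue:    fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); break;
   case Json::stringValue:  fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); break;
   case Json::booleanValue: fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); break;
   case Json::arrayValue:
   {
      fprintf( fout, "%s=[]\n", path.c_str() );          // e.g. ".=[]" for a top-level array
      for ( unsigned int index = 0; index < value.size(); ++index )
      {
         char buffer[32];
         sprintf( buffer, "[%u]", index );
         printValueTree( fout, value[index], path + buffer );   // ".[0]", ".[1]", ...
      }
      break;
   }
   case Json::objectValue:
   {
      fprintf( fout, "%s={}\n", path.c_str() );          // e.g. ".={}" for a top-level object
      Json::Value::Members members = value.getMemberNames();
      for ( Json::Value::Members::iterator it = members.begin(); it != members.end(); ++it )
      {
         const std::string &name = *it;
         printValueTree( fout, value[name],
                         path + ( path == "." ? "" : "." ) + name );  // ".count", ".name.aka", ...
      }
      break;
   }
   default:
      break;
   }
}

Feeding test_complex_01.json below through a walker of this kind yields lines of exactly the shape seen in test_complex_01.expected.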
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 7cc9374a8543c951d41450e426ba947a0e9b5cb1 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 07:51:07 +0000 Subject: [PATCH 107/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@107 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 207 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + .../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + 
.../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + 
.../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 167 files changed, 13875 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 
tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 
100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. 
Scons requires
+python to be installed (http://www.python.org).
+
+You can download the scons-local distribution from the following URL:
+http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375
+
+Unzip it in the directory where you found this README file. scons.py should be
+at the same level as README.
+
+python scons.py platform=PLTFRM [TARGET]
+where PLTFRM may be one of:
+    suncc       Sun C++ (Solaris)
+    vacpp       Visual Age C++ (AIX)
+    mingw
+    msvc6       Microsoft Visual Studio 6 service pack 5-6
+    msvc70      Microsoft Visual Studio 2002
+    msvc71      Microsoft Visual Studio 2003
+    msvc80      Microsoft Visual Studio 2005
+    linux-gcc   GNU C++ (Linux, also reported to work for Mac OS X)
+
+Adding a platform is fairly simple: you need to change the SConstruct file
+to do so.
+
+and TARGET may be:
+    check: build library and run unit tests.
+
+
+* Running the tests manually:
+  ==========================
+
+cd test
+# This will run the Reader/Writer tests
+python runjsontests.py "path to jsontest.exe"
+
+# This will run the Reader/Writer tests, using the JSONChecker test suite
+# (http://www.json.org/JSON_checker/).
+# Note: not all tests pass: JsonCpp is too lenient (for example,
+# it allows an integer to start with '0'). The goal is to improve
+# strict-mode parsing so that all tests pass.
+python runjsontests.py --with-json-checker "path to jsontest.exe"
+
+# This will run the unit tests (mostly Value)
+python rununittests.py "path to test_lib_json.exe"
+
+You can run the tests using valgrind:
+python rununittests.py --valgrind "path to test_lib_json.exe"
+
+
+* Building the documentation:
+  ===========================
+
+Run the Python script doxybuild.py from the top directory:
+
+python doxybuild.py --open --with-dot
+
+See doxybuild.py --help for options.
+
+
+* Adding a reader/writer test:
+  ============================
+
+To add a test, you need to create two files in test/data:
+- a TESTNAME.json file, that contains the input document in JSON format.
+- a TESTNAME.expected file, that contains a flattened representation of
+  the input document.
+
+TESTNAME.expected file format:
+- each line represents a JSON element of the element tree represented
+  by the input document.
+- each line has two parts: the path to access the element, separated from
+  the element value by '='. Array and object values are always empty
+  (i.e. represented by either [] or {}).
+- element path: '.' represents the root element, and is used to separate
+  object members. [N] is used to specify the value of an array element
+  at index N.
+See test_complex_01.json and test_complex_01.expected to better understand
+element paths.
+
+
+* Understanding reader/writer test output:
+  ========================================
+
+When a test is run, output files are generated alongside the input test files.
+Below is a short description of the content of each file:
+
+- test_complex_01.json: input JSON document
+- test_complex_01.expected: flattened JSON element tree used to check whether
+  parsing was correct.
+
+- test_complex_01.actual: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.json
+- test_complex_01.rewrite: JSON document written by jsontest.exe using the
+  Json::Value parsed from test_complex_01.json and serialized using
+  Json::StyledWriter.
+- test_complex_01.actual-rewrite: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.rewrite.
+- test_complex_01.process-output: jsontest.exe output, typically useful to
+  understand parsing errors. 
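To make the flattened TESTNAME.expected format described in the README above concrete, here is a small, hypothetical example; the file name and values are illustrative only and do not correspond to any test shipped in this patch.

hypothetical test_example.json:

    { "name": "jsoncpp", "versions": [ 1, 2 ] }

hypothetical test_example.expected (following the path/value rules quoted above):

    .={}
    .name="jsoncpp"
    .versions=[]
    .versions[0]=1
    .versions[1]=2

Each line gives the path from the root ('.') to one element and its value; container values are written as {} or [], and [N] selects the array element at index N.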
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. 
+ +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. 
+ +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". 
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. 
If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. 
When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. 
+ +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. 
Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. 
+ +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. 
+# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. 
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. 
This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. 
+# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. 
+ +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. 
+ +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. 
+ +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. 
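These PREDEFINED macros matter because parts of the headers are wrapped in preprocessor guards; predefining JSON_VALUE_USE_INTERNAL_MAP (together with the MSVC macros) makes Doxygen document the conditional sections as if they were compiled in. A minimal C++ sketch of the pattern, using the guard and forward declarations that appear in include/json/forwards.h later in this patch:

\code
// Sketch: declarations guarded like this are only seen by Doxygen (and by
// the compiler) when JSON_VALUE_USE_INTERNAL_MAP is defined, which is why
// the Doxyfile predefines it above.
#ifdef JSON_VALUE_USE_INTERNAL_MAP
   class ValueInternalArray;   // internal-map containers get documented
   class ValueInternalMap;
#endif // JSON_VALUE_USE_INTERNAL_MAP
\endcode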
+ +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. 
The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. 
+ +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. 
+ +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox new file mode 100644 index 0000000..fc7b530 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox @@ -0,0 +1,97 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of values, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space" : true } +} +\endverbatim + +\section _features Features +- read and write JSON documents +- rewrite JSON documents preserving original comments + +\code +Json::Value root; // will contain the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report the failure and its location in the document to the user. + std::cout << "Failed to parse configuration\n" + << reader.getFormatedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'plug-ins', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown, to make the new configuration document: +// Since Json::Value has implicit constructors for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _plinks Project links +- json-cpp home + - json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +The json-cpp library and this documentation are in Public Domain. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org).
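The mainpage example above assumes the configuration members exist and have the expected types. As a hedged complement (loadPlugIn and the member names are the same illustrative ones used above), the query methods declared in include/json/value.h can be used to validate the document before converting values:

\code
// Defensive variant of the plug-in lookup: check structure before converting.
if ( root.isMember( "plug-ins" ) && root["plug-ins"].isArray() )
{
   const Json::Value plugins = root["plug-ins"];
   for ( Json::Value::UInt index = 0; index < plugins.size(); ++index )
      if ( plugins[index].isString() )
         loadPlugIn( plugins[index].asString() );
}
\endcode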
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox new file mode 100644 index 0000000..7f3aa1a --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox @@ -0,0 +1,32 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Make JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + - Release on sourceforge download + \section ms_strict Add a strict mode to reader/parser + Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value. + Some typical use cases involve converting an application-specific structure to/from a JSON document. + - Event-based parser to allow deserializing a JSON document directly into a data structure instead of + using the intermediate Json::Value. + - "Stream" based parser to serialize a JSON document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event-based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition, avoiding allocation + - Static property dictionary can be provided to the JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py new file mode 100644 index 0000000..82bdea6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doxybuild.py @@ -0,0 +1,207 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from its path in the tarball. + prefix_dir: all files stored in the tarball are placed under the sub-directory prefix_dir. Set to '' + to make them children of the root.
+ """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + tar.close() + + +def find_program(filename): + """find a program in folders path_lst, and sets env[var] + @param env: environmentA + @param filename: name of the program to search for + @param path_list: list of directories to search for filename + @param var: environment value to be checked for in env or os.environ + @return: either the value that is referenced with [var] in env or os.environ + or the first occurrence filename or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. 
+ top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(os.path.join('doc', warning_log_path), 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
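A hedged illustration of what this switch implies for callers: with exceptions enabled, an invalid type conversion is reported by throwing rather than by a C assert. The concrete exception type is not declared in this header, so the sketch below only assumes it derives from std::exception; the function and member names are illustrative.

\code
#include <exception>
#include <iostream>
#include <json/json.h>

void readPort( const Json::Value &config )
{
   try
   {
      // throws (rather than asserting) if "port" is not convertible to int
      int port = config["port"].asInt();
      std::cout << "port: " << port << "\n";
   }
   catch ( const std::exception &e )
   {
      std::cerr << "bad 'port' value: " << e.what() << "\n";
   }
}
\endcode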
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
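A short hedged sketch of the access rules just described (the member names are illustrative only): missing object members are created as null by the non-const operator[], arrays grow automatically and pad with null, and get() supplies a default when an element is absent.

\code
// Sketch of the auto-creation / auto-resize behaviour described above.
Json::Value root( Json::objectValue );
root["settings"]["retries"] = 3;         // intermediate null members are created on demand
Json::Value &list = root["list"];        // created as a null member...
list[2u] = "c";                          // ...now an arrayValue of size 3: [ null, null, "c" ]
int retries = root["settings"].get( "retries", 1 ).asInt();   // 3
int timeout = root["settings"].get( "timeout", 30 ).asInt();  // member missing: default 30
\endcode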
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
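+ *
+ * Purely as an illustration (not the literal implementation), a look-up
+ * conceptually scans the chain of links of a single bucket:
+ * \code
+ * BucketIndex bucketIndex = hash( key ) % bucketsSize_;
+ * for ( ValueInternalLink *link = &buckets_[bucketIndex]; link != 0; link = link->next_ )
+ *    for ( BucketIndex index = 0; index < ValueInternalLink::itemPerLink; ++index )
+ *       if ( link->keys_[index]  &&  strcmp( link->keys_[index], key ) == 0 )
+ *          return &link->items_[index];
+ * return 0; // key not present
+ * \endcode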
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
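+      // Illustration of the look-up formula documented above: with
+      // itemsPerPage == 8, item index 11 is stored at
+      // pages_[11 / 8][11 % 8], i.e. pages_[1][3].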
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
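+       * End users normally obtain these iterators through Value::begin() and
+       * Value::end(). A minimal traversal sketch (assumes \c root holds an
+       * objectValue):
+       * \code
+       * for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
+       *    std::cout << it.memberName() << "=" << (*it).toStyledString();
+       * \endcode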
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
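+       * A minimal usage sketch (assumes \c root already holds the value to dump):
+       * \code
+       * Json::StyledStreamWriter writer( "   " );
+       * writer.write( std::cout, root );
+       * \endcode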
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..1aa5978 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. 
+ """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
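+      // Items were constructed with placement new, so call the destructor
+      // explicitly; the raw page storage is released separately below.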
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
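resolveReference() and makeIndexValid() above give arrays lazy growth: writing past the current end makes every smaller index readable as a default (null) value. Seen through the public Json::Value interface, the effect is roughly this (an illustrative sketch, not code from the patch):

#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value arr;                                // nullValue, becomes an array on first use
   arr[5] = "last";                                // indices 0..4 now read back as null

   std::cout << arr.size() << std::endl;           // 6
   std::cout << arr[2].isNull() << std::endl;      // 1 (true)
   return 0;
}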
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
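The batch-based DefaultValueMapAllocator above separates raw storage from object lifetime: the allocator hands out uninitialized memory, construction happens via placement new, and teardown is an explicit destructor call followed by releasing the storage. The bare pattern, with malloc/free standing in for the (assumed) BatchAllocator:

#include <cstdlib>
#include <new>       // placement new
#include <string>

struct Widget
{
   std::string name;
   explicit Widget( const std::string &n ) : name( n ) {}
};

int main()
{
   void *raw = std::malloc( sizeof(Widget) );   // raw storage: the batch allocator's job
   if ( !raw )
      return 1;

   Widget *w = new (raw) Widget( "example" );   // construct in place

   w->~Widget();                                // destroy explicitly...
   std::free( raw );                            // ...then release the raw storage
   return 0;
}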
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
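find() above locates a key by hashing it into a bucket (hashedKey % bucketsSize_) and scanning that bucket's chain of fixed-size links; because each link's slots are filled from the front, the first unused slot ends the search. A simplified, self-contained model of that lookup (field names and the empty-slot convention are illustrative only):

#include <cstring>

enum { itemPerLink = 6 };                       // matches the layout described above

struct Link
{
   const char *keys[itemPerLink];               // 0 marks an unused slot in this model
   int         values[itemPerLink];
   Link       *next;
};

static const int *findValue( Link *buckets, unsigned bucketCount,
                             unsigned hashedKey, const char *key )
{
   for ( Link *link = &buckets[hashedKey % bucketCount]; link != 0; link = link->next )
   {
      for ( int i = 0; i < itemPerLink; ++i )
      {
         if ( !link->keys[i] )                  // unused slot: key is not present
            return 0;
         if ( std::strcmp( key, link->keys[i] ) == 0 )
            return &link->values[i];
      }
   }
   return 0;
}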
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
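codePointToUTF8() above follows the standard UTF-8 layout: one byte up to U+007F, two up to U+07FF, three up to U+FFFF and four up to U+10FFFF. A standalone check of the three-byte branch against the well-known encoding of U+20AC (the euro sign):

#include <cassert>
#include <string>

// Hand-rolled 3-byte UTF-8 encoding, mirroring the cp <= 0xFFFF branch above.
static std::string encode3( unsigned int cp )
{
   std::string r( 3, '\0' );
   r[0] = static_cast<char>( 0xE0 | ( (cp >> 12) & 0x0F ) );
   r[1] = static_cast<char>( 0x80 | ( (cp >> 6)  & 0x3F ) );
   r[2] = static_cast<char>( 0x80 | (  cp        & 0x3F ) );
   return r;
}

int main()
{
   std::string euro = encode3( 0x20AC );
   assert( (unsigned char)euro[0] == 0xE2 );
   assert( (unsigned char)euro[1] == 0x82 );
   assert( (unsigned char)euro[2] == 0xAC );
   return 0;
}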
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
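The minInt/maxInt/maxUInt constants above are computed from unsigned arithmetic alone: UInt(-1) is the all-ones pattern, half of it is the largest positive signed value, and the bitwise complement of that half is the most negative one. A quick standalone check (assuming the usual 32-bit two's-complement int, which the conversion back to Int relies on):

#include <cassert>
#include <climits>

int main()
{
   typedef unsigned int UInt;
   typedef int Int;

   const UInt maxUInt = UInt(-1);                // all bits set
   const Int  maxInt  = Int( maxUInt / 2 );      // 0x7FFFFFFF
   const Int  minInt  = Int( ~(maxUInt / 2) );   // 0x80000000 on two's complement

   assert( maxUInt == UINT_MAX );
   assert( maxInt  == INT_MAX );
   assert( minInt  == INT_MIN );
   return 0;
}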
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
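isConvertibleTo() above encodes the conversion matrix behind the asXxx() accessors: for example a negative integer is not convertible to uintValue, and a string converts to nullValue only when it is empty. A small sketch exercising a few of those rules through the public API (assuming these methods are exposed as shown above):

#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value negative( -7 );
   Json::Value truth( true );
   Json::Value empty( "" );

   std::cout << negative.isConvertibleTo( Json::uintValue ) << std::endl;   // 0
   std::cout << negative.isConvertibleTo( Json::realValue ) << std::endl;   // 1
   std::cout << truth.asInt() << std::endl;                                 // 1
   std::cout << empty.isConvertibleTo( Json::nullValue ) << std::endl;      // 1
   return 0;
}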
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : "";
+#else
+   if ( !isArray_ )
+      return ValueInternalMap::key( iterator_.map_ );
+   return "";
+#endif
+}
+
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueConstIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueConstIterator::ValueConstIterator()
+{
+}
+
+
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator &current )
+   : ValueIteratorBase( current )
+{
+}
+#else
+ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+
+ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+#endif
+
+ValueConstIterator &
+ValueConstIterator::operator =( const ValueIteratorBase &other )
+{
+   copy( other );
+   return *this;
+}
+
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueIterator::ValueIterator()
+{
+}
+
+
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+ValueIterator::ValueIterator( const Value::ObjectValues::iterator &current )
+   : ValueIteratorBase( current )
+{
+}
+#else
+ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+
+ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state )
+   : ValueIteratorBase( state )
+{
+}
+#endif
+
+ValueIterator::ValueIterator( const ValueConstIterator &other )
+   : ValueIteratorBase( other )
+{
+}
+
+ValueIterator::ValueIterator( const ValueIterator &other )
+   : ValueIteratorBase( other )
+{
+}
+
+ValueIterator &
+ValueIterator::operator =( const SelfType &other )
+{
+   copy( other );
+   return *this;
+}
diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp
new file mode 100644
index 0000000..cdf4188
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp
@@ -0,0 +1,829 @@
+#include <json/writer.h>
+#include <utility>
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+#include <iostream>
+#include <sstream>
+#include <iomanip>
+
+#if _MSC_VER >= 1400 // VC++ 8.0
+#pragma warning( disable : 4996 )   // disable warning about strdup being deprecated.
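
For orientation, the writer classes implemented in this file are typically driven as in the minimal sketch below. It is illustrative only and not part of the patch; it assumes the umbrella header include/json/json.h from this import and a program linked against the lib_json library.

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main()
    {
       Json::Value root;                        // null value; becomes an object on first key access
       root["encoding"] = "UTF-8";
       root["lengths"].append( 3 );             // becomes an array on first append
       root["lengths"].append( 5 );

       Json::FastWriter fast;                   // compact, single-line output
       std::string compact = fast.write( root );

       Json::StyledWriter styled;               // indented, human-readable output, keeps comments
       std::string pretty = styled.write( root );

       std::cout << compact << pretty;
       return 0;
    }
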
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+         // blep notes: actually escaping \/ may be useful in javascript to avoid </
+         // sequence.
+         // Should add a flag to allow this compatibility mode and prevent this
+         // sequence from occurring.
+         default:
+            if ( isControlCharacter( *c ) )
+            {
+               std::ostringstream oss;
+               oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c);
+               result += oss.str();
+            }
+            else
+            {
+               result += *c;
+            }
+            break;
+      }
+   }
+   result += "\"";
+   return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer()
+{
+}
+
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+   : yamlCompatiblityEnabled_( false )
+{
+}
+
+
+void
+FastWriter::enableYAMLCompatibility()
+{
+   yamlCompatiblityEnabled_ = true;
+}
+
+
+std::string
+FastWriter::write( const Value &root )
+{
+   document_ = "";
+   writeValue( root );
+   document_ += "\n";
+   return document_;
+}
+
+
+void
+FastWriter::writeValue( const Value &value )
+{
+   switch ( value.type() )
+   {
+   case nullValue:
+      document_ += "null";
+      break;
+   case intValue:
+      document_ += valueToString( value.asInt() );
+      break;
+   case uintValue:
+      document_ += valueToString( value.asUInt() );
+      break;
+   case realValue:
+      document_ += valueToString( value.asDouble() );
+      break;
+   case stringValue:
+      document_ += valueToQuotedString( value.asCString() );
+      break;
+   case booleanValue:
+      document_ += valueToString( value.asBool() );
+      break;
+   case arrayValue:
+      {
+         document_ += "[";
+         int size = value.size();
+         for ( int index = 0; index < size; ++index )
+         {
+            if ( index > 0 )
+               document_ += ",";
+            writeValue( value[index] );
+         }
+         document_ += "]";
+      }
+      break;
+   case objectValue:
+      {
+         Value::Members members( value.getMemberNames() );
+         document_ += "{";
+         for ( Value::Members::iterator it = members.begin();
+               it != members.end();
+               ++it )
+         {
+            const std::string &name = *it;
+            if ( it != members.begin() )
+               document_ += ",";
+            document_ += valueToQuotedString( name.c_str() );
+            document_ += yamlCompatiblityEnabled_ ?
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..325b1d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
+/// JSONTEST_ASSERT( x == y );
+#define JSONTEST_ASSERT( expr ) \
+   if ( expr ) \
+   { \
+   } \
+   else \
+      result_->addFailure( __FILE__, __LINE__, #expr )
+
+/// \brief Asserts that the given predicate is true.
+/// The predicate may do other assertions and be a member function of the fixture.
+#define JSONTEST_ASSERT_PRED( expr ) \
+   { \
+      JsonTest::PredicateContext _minitest_Context = { \
+         result_->predicateId_, __FILE__, __LINE__, #expr }; \
+      result_->predicateStackTail_->next_ = &_minitest_Context; \
+      result_->predicateId_ += 1; \
+      result_->predicateStackTail_ = &_minitest_Context; \
+      (expr); \
+      result_->popPredicateContext(); \
+   } \
+   *result_
+
+/// \brief Asserts that two values are equal.
+#define JSONTEST_ASSERT_EQUAL( expected, actual ) \
+   JsonTest::checkEqual( *result_, expected, actual, \
+                         __FILE__, __LINE__, \
+                         #expected " == " #actual )
+
+/// \brief Asserts that two strings are equal.
+#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
+   JsonTest::checkStringEqual( *result_, \
+      std::string(expected), std::string(actual), \
+      __FILE__, __LINE__, \
+      #expected " == " #actual )
+
+/// \brief Begin a fixture test case.
+#define JSONTEST_FIXTURE( FixtureType, name ) \
+   class Test##FixtureType##name : public FixtureType \
+   { \
+   public: \
+      static JsonTest::TestCase *factory() \
+      { \
+         return new Test##FixtureType##name(); \
+      } \
+   public: /* overridden from TestCase */ \
+      virtual const char *testName() const \
+      { \
+         return #FixtureType "/" #name; \
+      } \
+      virtual void runTestCase(); \
+   }; \
+ \
+   void Test##FixtureType##name::runTestCase()
+
+#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
+   &Test##FixtureType##name::factory
+
+#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
+   (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
+
+#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
new file mode 100644
index 0000000..9864178
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
@@ -0,0 +1,244 @@
+#include <json/json.h>
+#include "jsontest.h"
+
+
+// TODO:
+// - boolean values report being integral; they should not.
+// - unsigned integers within the signed integer range are not considered valid integers; the range should be checked.
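+
+// Illustrative sketch (not part of the original test suite): how the
+// JSONTEST_* macros from jsontest.h are meant to be used. The fixture and
+// test names below ("ExampleTest", "twoPlusTwo") are hypothetical, and the
+// snippet is disabled with #if 0 so it does not change the suite.
+#if 0
+struct ExampleTest : JsonTest::TestCase
+{
+};
+
+JSONTEST_FIXTURE( ExampleTest, twoPlusTwo )
+{
+   // checkEqual() records a non-aborting failure if the two values differ.
+   JSONTEST_ASSERT_EQUAL( 4, 2 + 2 );
+}
+
+// Registration and execution, typically done in main():
+//   JsonTest::Runner runner;
+//   JSONTEST_REGISTER_FIXTURE( runner, ExampleTest, twoPlusTwo );
+//   return runner.runCommandLine( argc, argv );
+#endif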
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..84f56b6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 2719d1d9a61f3355fd044f4a7e0e46b90c677339 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 07:57:38 +0000 Subject: [PATCH 108/268] - added (incomplete) script makerelease.py to handle svn tagging and tar balls generation git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@108 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doxybuild.py | 86 ++++++++++++--------- trunk/jsoncpp/makerelease.py | 141 +++++++++++++++++++++++++++++++++++ 2 files changed, 192 insertions(+), 35 deletions(-) create mode 100644 trunk/jsoncpp/makerelease.py diff --git a/trunk/jsoncpp/doxybuild.py b/trunk/jsoncpp/doxybuild.py index 445de4b..82bdea6 100644 --- a/trunk/jsoncpp/doxybuild.py +++ b/trunk/jsoncpp/doxybuild.py @@ -89,14 +89,14 @@ def do_subst_in_file(targetfile, sourcefile, dict): print "Can't write target file %s"%targetfile raise -def run_doxygen(doxygen_path, config_file, working_dir): +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): config_file = os.path.abspath( config_file ) doxygen_path = doxygen_path old_cwd = os.getcwd() try: os.chdir( working_dir ) cmd = [doxygen_path, config_file] - print ' '.join( cmd ) + print 'Running:', ' '.join( cmd ) try: import subprocess except: @@ -104,40 +104,27 @@ def run_doxygen(doxygen_path, config_file, working_dir): print 'Documentation generation failed' return False else: - try: - subprocess.check_call( cmd ) - except subprocess.CalledProcessError: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout return False return True finally: os.chdir( old_cwd ) -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.enable_interspersed_args() - options, args = parser.parse_args() +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True version = open('version','rt').read().strip() output_dir = '../build/doxygen' # relative to doc/doxyfile location. @@ -167,10 +154,9 @@ def yesno( bool ): os.makedirs( full_output_dir ) do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc' ) - print open(os.path.join('doc', warning_log_path), 'rb').read() - if not ok: - print 'Doxygen generation failed' + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(os.path.join('doc', warning_log_path), 'rb').read() index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) print 'Generated documentation can be found in:' print index_path @@ -187,5 +173,35 @@ def yesno( bool ): tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + if __name__ == '__main__': main() diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py new file mode 100644 index 0000000..c21a1e3 --- /dev/null +++ b/trunk/jsoncpp/makerelease.py @@ -0,0 +1,141 @@ +"""Tag the sandbox for release, make source and doc tarballs. + +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. 
+ """ + svn_command( 'delete', '-m', message, tag_url ) + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + print 'Generated doxygen document...' + doxybuild.build_doc( options, make_release=True ) + #@todo: + # svn export + # source tarball + # decompress source tarball + # ?compile & run & check + # ?upload documentation + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() From a58e3c192fdf71b5ba9398b6d733dfeadfabd317 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 08:23:41 +0000 Subject: [PATCH 109/268] - changed SVN EOL properties so that HTML file are in Unix format, Visual Studio solution are always in Windows format, and sources are in native format. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@109 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/makefiles/vs71/lib_json.vcproj | 428 ++++++++-------- trunk/jsoncpp/src/test_lib_json/jsontest.h | 508 +++++++++---------- trunk/jsoncpp/src/test_lib_json/main.cpp | 488 +++++++++--------- trunk/jsoncpp/src/test_lib_json/sconscript | 20 +- 4 files changed, 722 insertions(+), 722 deletions(-) diff --git a/trunk/jsoncpp/makefiles/vs71/lib_json.vcproj b/trunk/jsoncpp/makefiles/vs71/lib_json.vcproj index 1aa5978..2d7bf99 100644 --- a/trunk/jsoncpp/makefiles/vs71/lib_json.vcproj +++ b/trunk/jsoncpp/makefiles/vs71/lib_json.vcproj @@ -1,214 +1,214 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 325b1d2..8f0bd31 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -1,254 +1,254 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. - TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. 
- TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. -/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. 
-/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 9864178..b80776d 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -1,244 +1,244 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/trunk/jsoncpp/src/test_lib_json/sconscript b/trunk/jsoncpp/src/test_lib_json/sconscript index 84f56b6..915fd01 100644 --- a/trunk/jsoncpp/src/test_lib_json/sconscript +++ b/trunk/jsoncpp/src/test_lib_json/sconscript @@ -1,10 +1,10 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('test_lib_json', '#libs') From b551518ca41f06ff58a1580034bdc3aa80d16afa Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 08:44:52 +0000 Subject: [PATCH 110/268] - added svn export - prepared tool for eol conversion git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@110 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/devtools/antglob.py | 201 ++++++++++++++++++++++++++ trunk/jsoncpp/devtools/wscript | 225 ++++++++++++++++++++++++++++++ trunk/jsoncpp/makerelease.py | 31 ++-- 3 files changed, 447 insertions(+), 10 deletions(-) create mode 100644 trunk/jsoncpp/devtools/antglob.py create mode 100644 trunk/jsoncpp/devtools/wscript diff --git a/trunk/jsoncpp/devtools/antglob.py b/trunk/jsoncpp/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/trunk/jsoncpp/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/trunk/jsoncpp/devtools/wscript b/trunk/jsoncpp/devtools/wscript new file mode 100644 index 0000000..61b5183 --- /dev/null +++ b/trunk/jsoncpp/devtools/wscript @@ -0,0 +1,225 @@ +VERSION='0.1.0' +APPNAME='CppUnit2' +srcdir = '.' +blddir = 'build' + +import Options +import Logs +import UnitTest +import Utils +import os.path +import sys +import glob + +CPPUT_EXAMPLES = ''' + checking_assertions + ignore_failure_demo + input_test + light_fixture + log_demo + parametrized_test + stringize_demo + test_function + '''.split() + +BROKEN_CPPUT_EXAMPLES = ''' + input_based_test + opentest_demo + table_fixture + '''.split() + +def _get_example_dirs(): + return [ os.path.join( 'examples', d ) + for d in CPPUT_EXAMPLES ] + +def _get_main_script_dir(): + """Gets the path of the directory containing this script.""" + # The main script path is only valid once the it has been executed, hence this can not be a global var. 
+ assert Utils.g_module is not None + return os.path.split( Utils.g_module.root_path )[0] + +def _fix_import_path(): + """Adds the main script directory to be able to import waftools modules.""" + import_dir = _get_main_script_dir() + if import_dir not in sys.path: + sys.path.append( import_dir ) + +def _get_tool_dir(): + return os.path.join( main_script_dir, 'waftools' ) + +def set_options(opt): + """Always called first during the build.""" + _fix_import_path() + import waftools.log_output + waftools.log_output.set_options( opt ) + + # Adds command-line options for compiler + opt.tool_options('compiler_cxx') + + # from compiler_cxx tools, set_options + import Tools.ccroot as ccroot + opt.add_option('-d', '--debug-level', + action = 'store', + default = ccroot.DEBUG_LEVELS.RELEASE, + help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s'] " % "', '".join(ccroot.DEBUG_LEVELS.ALL) + + "[default: %default]", + choices = ccroot.DEBUG_LEVELS.ALL, + dest = 'debug_level') + +def init(): + """Called set_options() once the command-line has been parsed. + Command-line options value are accessed through Options.options. + """ + import waftools.log_output + waftools.log_output.init() + + +def configure(conf): + # There is a link issue with msvc 9! + conf.env['MSVC_VERSIONS'] = ['msvc 8.0'] + + # CXX=g++-3.0 ./waf.py configure will use g++-3.0 instead of 'g++' + conf.check_tool('compiler_cxx') + + # Select debug/optimize flags + debug_level = Options.options.debug_level.upper() + conf.env.append_unique('CXXFLAGS', conf.env['CXXFLAGS_' + debug_level]) + + compiler = conf.env['COMPILER_CXX'] + if compiler == 'msvc': # Microsoft Visual Studio specifics + # Select run-time library variant + if 'DEBUG' in debug_level: + crt_variant = 'MULTITHREADED_DLL_DBG' + else: + crt_variant = 'MULTITHREADED_DLL' + # MULTITHREADED, MULTITHREADED_DLL, MULTITHREADED_DBG, MULTITHREADED_DLL_DBG + conf.env.append_unique('CPPFLAGS', conf.env['CPPFLAGS_CRT_' + crt_variant]) + conf.env.append_unique('CPPDEFINES', conf.env['CPPDEFINES_CRT_' + crt_variant]) + + ## batched builds can be enabled by including the module optim_cc + # conf.check_tool('batched_cc') + + +# WAF command: + +def build(bld): + # process subfolders from here + bld.add_subdirs('''src/cpptl + src/cpput + src/cpputtest''') + + bld.add_subdirs( _get_example_dirs() ) + +def gen_examples_wscript(ctx): + for example_dir in _get_example_dirs(): + wscript_path = os.path.join( example_dir, 'wscript_build' ) + sources = glob.glob( os.path.join( example_dir, '*.cpp' ) ) + Logs.info( 'Generating "%s"' % wscript_path ) + open( wscript_path, 'wb' ).write( """\ +#! /usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +bld.new_task_gen( + features = 'cxx cprogram', + source = '''%(sources)s''', + includes = '../.. 
../../include', # for examples/common + uselib_local = 'cpptl cpput', + name = 'example_%(name)s', + target = 'example_%(name)s' ) +""" % { + 'sources': ' '.join( [os.path.basename(s) for s in sources] ), + 'name': os.path.basename( example_dir ) + } ) + +def _fix_python_source( path, is_dry_run = True, verbose = True ): + """Makes sure that all sources have unix EOL and replace tabs with 4 spaces.""" + from waftools import reindent + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + + if verbose: + print '%s =>' % path, + try: + r = reindent.Reindenter(f) + finally: + f.close() + if r.run(): # File need to be fixed ? + if not is_dry_run: + f = open(path, "wb") + try: + r.write(f) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + elif verbose: + print ' OK' + return True + +def _fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True + + + +def _do_fix( is_dry_run = True ): + from waftools import antglob + python_sources = antglob.glob( '.', + includes = '**/*.py **/wscript **/wscript_build', + excludes = antglob.default_excludes + './waf.py', + prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) + for path in python_sources: + _fix_python_source( path, is_dry_run ) + + cpp_sources = antglob.glob( '.', + includes = '**/*.cpp **/*.h **/*.inl', + prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) + for path in cpp_sources: + _fix_source_eol( path, is_dry_run ) + + +def dry_fix(context): + _do_fix( is_dry_run = True ) + +def fix(context): + _do_fix( is_dry_run = False ) + +def shutdown(): + pass + +def check(context): + # Unit tests are run when "check" target is used + ut = UnitTest.unit_test() + ut.change_to_testfile_dir = True + ut.want_to_see_test_output = True + ut.want_to_see_test_error = True + ut.run() + ut.print_results() diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index c21a1e3..c00062a 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -14,6 +14,7 @@ import doxybuild import subprocess import xml.etree.ElementTree as ElementTree +import shutil SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' @@ -82,6 +83,15 @@ def svn_remove_tag( tag_url, message ): """ svn_command( 'delete', '-m', message, tag_url ) +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. 
+ """ + if os.path.isdir( export_dir ): + shutil.rmtree( export_dir ) + svn_command( 'export', tag_url, export_dir ) + def main(): usage = """%prog release_version next_dev_version Update 'version' file to release_version and commit. @@ -119,17 +129,18 @@ def main(): print 'Setting version to', release_version set_version( release_version ) tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - print 'Generated doxygen document...' - doxybuild.build_doc( options, make_release=True ) +## if svn_check_if_tag_exist( tag_url ): +## if options.retag_release: +## svn_remove_tag( tag_url, 'Overwriting previous tag' ) +## else: +## print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url +## sys.exit( 1 ) +## svn_tag_sandbox( tag_url, 'Release ' + release_version ) +## print 'Generated doxygen document...' +## doxybuild.build_doc( options, make_release=True ) + svn_export( tag_url, 'dist/distcheck' ) #@todo: - # svn export + # fix-eol # source tarball # decompress source tarball # ?compile & run & check From ac8634ac8df14b95c906f75bf0f4bf42606c753d Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 20:34:45 +0000 Subject: [PATCH 111/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@111 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 207 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 
- tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - 
.../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 167 files changed, 13875 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 
tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt 
deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. 
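The flattened TESTNAME.expected format described above is easiest to grasp from a small example. The sketch below is an illustration only and is not part of this patch or of the project: the flatten() helper and the sample document are hypothetical, the real flattening is produced by jsontest.exe (or the Python test runner), and details such as member ordering and scalar quoting may differ from the actual .expected files.

import json

def flatten(value, path="."):
    # Emit one "path=value" line per element, following the convention
    # described above: '.' denotes the root, object members are separated
    # by '.', and [N] selects the array element at index N. Composite
    # values are written as empty ({} or []).
    lines = []
    if isinstance(value, dict):
        lines.append("%s={}" % path)
        for name in sorted(value):
            child = path + name if path == "." else "%s.%s" % (path, name)
            lines.extend(flatten(value[name], child))
    elif isinstance(value, list):
        lines.append("%s=[]" % path)
        for index, item in enumerate(value):
            lines.extend(flatten(item, "%s[%d]" % (path, index)))
    else:
        lines.append("%s=%s" % (path, json.dumps(value)))
    return lines

if __name__ == "__main__":
    # Hypothetical input document, standing in for a TESTNAME.json file.
    document = json.loads('{"name": "json", "versions": [1, 2]}')
    print("\n".join(flatten(document)))

For the hypothetical document above this prints .={}, .name="json", .versions=[], .versions[0]=1 and .versions[1]=2, which mirrors the path/'='/value layout used by files such as test_complex_01.expected.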
- - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) 
-if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. 
-env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. 
-# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. 
- -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. 
Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. 
- -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. 
- -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. 
- -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. 
- -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. 
Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. 
- -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. 
- -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). 
- -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. 
Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. 
This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. 
- -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. 
- -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. 
- -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. 
- -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. 
Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
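For quick reference, a minimal, self-contained sketch of the read/modify/write round trip described in the mainpage above. The configuration text and member names are illustrative only; the calls mirror the Reader, Value and StyledWriter usage shown in the mainpage and in the include/json/reader.h and include/json/value.h declarations that appear later in this patch.

\code
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Example configuration document (same shape as the mainpage sample).
   const std::string config_doc =
      "{ \"encoding\" : \"UTF-8\", "
      "  \"plug-ins\" : [ \"python\", \"c++\" ], "
      "  \"indent\" : { \"length\" : 3, \"use_space\" : true } }";

   Json::Value root;     // will contain the root value after parsing
   Json::Reader reader;
   if ( !reader.parse( config_doc, root ) )
   {
      // Report the failure and its location in the document.
      std::cout << "Failed to parse configuration\n"
                << reader.getFormatedErrorMessages();
      return 1;
   }

   // Query members, supplying defaults for missing entries.
   std::string encoding = root.get( "encoding", "UTF-8" ).asString();
   int indentLength     = root["indent"].get( "length", 3 ).asInt();
   bool useSpace        = root["indent"].get( "use_space", true ).asBool();

   // Iterate over the plug-in list (an arrayValue).
   const Json::Value plugins = root["plug-ins"];
   for ( Json::Value::UInt i = 0; i < plugins.size(); ++i )
      std::cout << "plug-in: " << plugins[i].asString() << "\n";

   // Modify the document and write it back as styled JSON.
   root["encoding"] = encoding;
   root["indent"]["length"] = indentLength;
   root["indent"]["use_space"] = useSpace;

   Json::StyledWriter writer;
   std::cout << writer.write( root );
   return 0;
}
\endcode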
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 82bdea6..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,207 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. 
- """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - tar.close() - - -def find_program(filename): - """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. 
- top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(os.path.join('doc', warning_log_path), 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
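Path and PathArgument are declared experimental and untested above, so the following is only an illustration of the documented syntax, under the assumption that resolve() and make() behave as their comments suggest; all names in the sketch are illustrative.
\code
#include <json/json.h>

// Illustration only: Path is marked experimental above, so this sketch just
// mirrors the documented syntax (".name", ".[n]", ".%", ".[%]").
void pathSketch( Json::Value &root )
{
   Json::Path fixed( ".settings.[0].name" );
   const Json::Value &name = fixed.resolve( root );          // node, if it exists
   Json::Value orDefault = fixed.resolve( root, "default" ); // fallback value

   // "%" and "[%]" take the member name / index from the PathArgument list.
   Json::Path parameterized( ".%.[%]",
                             Json::PathArgument( "settings" ),
                             Json::PathArgument( 0u ) );
   Json::Value &node = parameterized.make( root );           // creates missing nodes
   node = "value";
}
\endcode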
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
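The comment above describes the paged look-up that ValueInternalArray performs; the fragment below is a self-contained model of that arithmetic with illustrative names, not the library code. Because the page size is a power of two, the divide and modulo reduce to a shift and a mask.
\code
// Stand-alone model of the page arithmetic described above; kItemsPerPage
// mirrors itemsPerPage.
enum { kItemsPerPage = 8 };

int lookUp( int **pages, unsigned itemIndex )
{
   unsigned pageIndex   = itemIndex / kItemsPerPage;  // which fixed-size page
   unsigned indexInPage = itemIndex % kItemsPerPage;  // offset inside that page
   return pages[pageIndex][indexInPage];
}
\endcode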
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
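The iterator classes above are also flagged experimental; assuming they behave as declared, an object's members can be walked as in the hedged sketch below (the function name and printf output are illustrative).
\code
#include <json/json.h>
#include <cstdio>

// Sketch: iterate the members of an objectValue with the iterators declared above.
void iterationSketch( const Json::Value &object )
{
   for ( Json::Value::const_iterator it = object.begin(); it != object.end(); ++it )
   {
      // memberName() is the key for objects and "" otherwise; *it is the value.
      std::printf( "%s = %s", it.memberName(), (*it).toStyledString().c_str() );
   }
}
\endcode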
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
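A hedged sketch of the three writers declared in this header, assuming <json/json.h> from this tag: FastWriter emits a single line, StyledWriter returns an indented string, and StyledStreamWriter writes the same styled output to a stream.
\code
#include <json/json.h>
#include <iostream>
#include <string>

// Sketch only: the writers declared above, exercised on an existing root value.
void writerSketch( const Json::Value &root )
{
   Json::FastWriter fast;                     // compact, single line (machine oriented)
   std::string compact = fast.write( root );

   Json::StyledWriter styled;                 // indented, human friendly
   std::string pretty = styled.write( root );

   Json::StyledStreamWriter streamed( "  " ); // two-space indentation per level
   streamed.write( std::cout, root );
}
\endcode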
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 1aa5978..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. 
- """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
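// Illustrative aside (not part of the original patch): the page-based storage
// above separates raw allocation from object lifetime -- elements are created
// with placement new into malloc'ed pages, so they must be destroyed by an
// explicit destructor call before the page memory is released, exactly as the
// destructor loop above does for Value.  A minimal standalone sketch of that
// pattern, using std::string as a stand-in element type:
#include <cstdlib>
#include <new>
#include <string>

int main()
{
   typedef std::string Item;
   const unsigned int itemsPerPage = 8;
   // raw, uninitialized storage for one page
   Item *page = static_cast<Item *>( std::malloc( sizeof(Item) * itemsPerPage ) );
   for ( unsigned int i = 0; i < itemsPerPage; ++i )
      new ( &page[i] ) Item( "item" );   // construct each slot in place
   // ... use the page ...
   for ( unsigned int i = 0; i < itemsPerPage; ++i )
      page[i].~Item();                   // free() alone would leak whatever the items own
   std::free( page );
   return 0;
}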
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
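// Illustrative aside (not part of the original patch): the lookup above walks
// the chain of fixed-size links hanging off one bucket.  The bucket itself is
// selected from a simple string hash, as in ValueInternalMap::hash() further
// down (hash += *key++ * 37), reduced modulo the bucket count.  A minimal
// standalone sketch of that selection; sketchHash, sketchBucketIndex and
// bucketCount are hypothetical names:
static unsigned int sketchHash( const char *key )
{
   unsigned int hash = 0;
   while ( *key )
      hash += static_cast<unsigned int>( *key++ ) * 37; // same additive scheme as hash()
   return hash;
}

static unsigned int sketchBucketIndex( const char *key, unsigned int bucketCount )
{
   return sketchHash( key ) % bucketCount; // index of the first link in the chain to probe
}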
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
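// Illustrative aside (not part of the original patch): codePointToUTF8() above
// emits the standard UTF-8 byte layout -- 1 byte up to U+007F, 2 bytes up to
// U+07FF, 3 bytes up to U+FFFF, 4 bytes up to U+10FFFF.  As a quick worked
// example, the escape "\u20AC" (euro sign) decodes to code point 0x20AC, which
// falls in the 3-byte range and encodes as:
//    byte 0: 0xE0 | (0x20AC >> 12)         = 0xE2
//    byte 1: 0x80 | ((0x20AC >> 6) & 0x3F) = 0x82
//    byte 2: 0x80 | (0x20AC & 0x3F)        = 0xAC
// i.e. the three bytes E2 82 AC, which is what the reader appends to the
// decoded string.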
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
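// Illustrative aside (not part of the original patch): typical caller-side use
// of the Reader defined in this file -- parse a document into a Json::Value
// root and report failures via getFormatedErrorMessages().  The document text
// below is example input only:
//
//    Json::Value root;
//    Json::Reader reader;
//    bool ok = reader.parse( "{ \"name\": \"json\", \"count\": 3 }", root, true );
//    if ( !ok )
//       std::cerr << reader.getFormatedErrorMessages();
//    else
//       std::cout << root["name"].asString() << std::endl;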
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
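// Illustrative aside (not part of the original patch): the minInt / maxInt /
// maxUInt constants defined above rely on unsigned wrap-around rather than
// <limits>.  With a 32-bit UInt (and the two's-complement platforms the
// library targets):
//    UInt(-1)         == 0xFFFFFFFF  (all bits set)          -> maxUInt = 4294967295
//    UInt(-1) / 2     == 0x7FFFFFFF                          -> maxInt  = 2147483647
//    ~(UInt(-1) / 2)  == 0x80000000, reinterpreted as Int    -> minInt  = -2147483648
// so the limits stay correct for whatever width UInt happens to have.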
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
-            // blep notes: actually escaping \/ may be useful in javascript to avoid </ 
-            // sequence.
-            // Should add a flag to allow this compatibility mode and prevent this 
-            // sequence from occurring.
-         default:
-            if ( isControlCharacter( *c ) )
-            {
-               std::ostringstream oss;
-               oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c);
-               result += oss.str();
-            }
-            else
-            {
-               result += *c;
-            }
-            break;
-      }
-   }
-   result += "\"";
-   return result;
-}
-
-// Class Writer
-// //////////////////////////////////////////////////////////////////
-Writer::~Writer()
-{
-}
-
-
-// Class FastWriter
-// //////////////////////////////////////////////////////////////////
-
-FastWriter::FastWriter()
-   : yamlCompatiblityEnabled_( false )
-{
-}
-
-
-void
-FastWriter::enableYAMLCompatibility()
-{
-   yamlCompatiblityEnabled_ = true;
-}
-
-
-std::string
-FastWriter::write( const Value &root )
-{
-   document_ = "";
-   writeValue( root );
-   document_ += "\n";
-   return document_;
-}
-
-
-void
-FastWriter::writeValue( const Value &value )
-{
-   switch ( value.type() )
-   {
-   case nullValue:
-      document_ += "null";
-      break;
-   case intValue:
-      document_ += valueToString( value.asInt() );
-      break;
-   case uintValue:
-      document_ += valueToString( value.asUInt() );
-      break;
-   case realValue:
-      document_ += valueToString( value.asDouble() );
-      break;
-   case stringValue:
-      document_ += valueToQuotedString( value.asCString() );
-      break;
-   case booleanValue:
-      document_ += valueToString( value.asBool() );
-      break;
-   case arrayValue:
-      {
-         document_ += "[";
-         int size = value.size();
-         for ( int index =0; index < size; ++index )
-         {
-            if ( index > 0 )
-               document_ += ",";
-            writeValue( value[index] );
-         }
-         document_ += "]";
-      }
-      break;
-   case objectValue:
-      {
-         Value::Members members( value.getMemberNames() );
-         document_ += "{";
-         for ( Value::Members::iterator it = members.begin(); 
-               it != members.end(); 
-               ++it )
-         {
-            const std::string &name = *it;
-            if ( it != members.begin() )
-               document_ += ",";
-            document_ += valueToQuotedString( name.c_str() );
-            document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investigate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases will be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 325b1d2..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - # include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion fails. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targeted at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque<Failure> Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs tests as specified on the command-line. - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test cases in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template <typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two string values are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index 9864178..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean values report that they are integral. They should not. -// - unsigned integers in the integer range are not considered to be valid integers. Should check the range. 
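For orientation, the fixture macros above are used by deriving a fixture from JsonTest::TestCase, defining each test body with JSONTEST_FIXTURE, and registering the tests on a Runner. The following is a minimal sketch of that pattern; the fixture name MyTest, the test name defaultsToNull, and the <json/json.h> include path are illustrative assumptions (the include targets are elided in the patch text), and the real fixtures follow in main.cpp below.

   #include <json/json.h>   // assumed public header; the actual include is elided above
   #include "jsontest.h"

   // Hypothetical fixture: members are shared state available to every test body.
   struct MyTest : JsonTest::TestCase
   {
      Json::Value value_;   // a default-constructed Json::Value is null
   };

   // Expands to a TestCase subclass named TestMyTestdefaultsToNull plus the
   // body of its runTestCase().
   JSONTEST_FIXTURE( MyTest, defaultsToNull )
   {
      JSONTEST_ASSERT( value_.isNull() );
      JSONTEST_ASSERT_EQUAL( 0u, value_.size() );
   }

   int main( int argc, const char *argv[] )
   {
      JsonTest::Runner runner;
      JSONTEST_REGISTER_FIXTURE( runner, MyTest, defaultsToNull );
      return runner.runCommandLine( argc, argv );
   }

Run with no arguments to execute every registered test, or with --test MyTest/defaultsToNull to run a single case, mirroring the Runner::runCommandLine options implemented in jsontest.cpp above.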
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 84f56b6..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '<File "%s" is missing: %s>' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] <path to jsontestrunner.exe> [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From a14b7a9a5079ffcdd9a45dab69c77fe2a51b3d5e Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 20:41:23 +0000 Subject: [PATCH 112/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@112 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 178 ++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + .../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + 
.../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + 
.../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 170 files changed, 14277 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create 
mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. 
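For orientation, here is a minimal sketch of what that API looks like in use with this release; the sample document and variable names are illustrative only and are not part of the distribution:

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main()
    {
       std::string doc = "{ \"name\": \"Ann\", \"age\": 7 }";
       Json::Value root;
       Json::Reader reader;
       if ( !reader.parse( doc, root ) )   // unserialize from text
          return 1;                        // error details are available from the reader
       std::cout << root["name"].asString() << std::endl;
       Json::StyledWriter writer;
       std::cout << writer.write( root );  // serialize back to a string
       return 0;
    }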
+
+It can also preserve existing comments in unserialization/serialization steps,
+making it a convenient format to store user input files.
+
+Unserialization parsing is user-friendly and provides precise error reports.
+
+
+* Building/Testing:
+  =================
+
+JsonCpp uses SCons (http://www.scons.org) as a build system. SCons requires
+Python to be installed (http://www.python.org).
+
+You can download the scons-local distribution from the following URL:
+http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375
+
+Unzip it in the directory where you found this README file. scons.py should be
+at the same level as README.
+
+python scons.py platform=PLTFRM [TARGET]
+where PLTFRM may be one of:
+    suncc       Sun C++ (Solaris)
+    vacpp       Visual Age C++ (AIX)
+    mingw
+    msvc6       Microsoft Visual Studio 6 service pack 5-6
+    msvc70      Microsoft Visual Studio 2002
+    msvc71      Microsoft Visual Studio 2003
+    msvc80      Microsoft Visual Studio 2005
+    linux-gcc   GNU C++ (Linux, also reported to work for Mac OS X)
+
+Adding a platform is fairly simple: you need to change the SConstruct file
+to do so.
+
+and TARGET may be:
+    check: build library and run unit tests.
+
+
+* Running the tests manually:
+  ==========================
+
+cd test
+# This will run the Reader/Writer tests
+python runjsontests.py "path to jsontest.exe"
+
+# This will run the Reader/Writer tests, using JSONChecker test suite
+# (http://www.json.org/JSON_checker/).
+# Notes: not all tests pass: JsonCpp is too lenient (for example,
+# it allows an integer to start with '0'). The goal is to improve
+# strict mode parsing to get all tests to pass.
+python runjsontests.py --with-json-checker "path to jsontest.exe"
+
+# This will run the unit tests (mostly Value)
+python rununittests.py "path to test_lib_json.exe"
+
+You can run the tests using valgrind:
+python rununittests.py --valgrind "path to test_lib_json.exe"
+
+
+* Building the documentation:
+  ===========================
+
+Run the Python script doxybuild.py from the top directory:
+
+python doxybuild.py --open --with-dot
+
+See doxybuild.py --help for options.
+
+
+* Adding a reader/writer test:
+  ============================
+
+To add a test, you need to create two files in test/data:
+- a TESTNAME.json file that contains the input document in JSON format.
+- a TESTNAME.expected file that contains a flattened representation of
+  the input document.
+
+TESTNAME.expected file format:
+- each line represents a JSON element of the element tree represented
+  by the input document.
+- each line has two parts: the path to access the element, separated from
+  the element value by '='. Array and object values are always empty
+  (e.g. represented by either [] or {}).
+- element path: '.' represents the root element, and is used to separate
+  object members. [N] is used to specify the value of an array element
+  at index N.
+See test_complex_01.json and test_complex_01.expected to better understand
+element paths.
+
+
+* Understanding reader/writer test output:
+  ========================================
+
+When a test is run, output files are generated alongside the input test files.
+Below is a short description of the content of each file:
+
+- test_complex_01.json: input JSON document
+- test_complex_01.expected: flattened JSON element tree used to check whether
+  parsing was correct (a short illustrative example of this flattened form
+  follows below).
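As a hypothetical illustration of this flattened form (it is not one of the shipped test cases), an input document such as

    { "first" : "Ann", "scores" : [ 1, 2 ] }

would be expected to flatten to something along the lines of:

    .={}
    .first="Ann"
    .scores=[]
    .scores[0]=1
    .scores[1]=2

The exact quoting and number formatting are whatever jsontest.exe emits; see the real test_complex_01.expected for the authoritative form.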
+
+- test_complex_01.actual: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.json
+- test_complex_01.rewrite: JSON document written by jsontest.exe using the
+  Json::Value parsed from test_complex_01.json and serialized using
+  Json::StyledWriter.
+- test_complex_01.actual-rewrite: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.rewrite.
+- test_complex_01.process-output: jsontest.exe output, typically useful to
+  understand parsing errors.
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct
new file mode 100644
index 0000000..0499db9
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/SConstruct
@@ -0,0 +1,235 @@
+"""
+Notes:
+- shared library support is buggy: it assumes that a static and dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time.
+
+To add a platform:
+- add its name in options allowed_values below
+- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example.
+"""
+
+import os
+import os.path
+import sys
+
+JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip()
+DIST_DIR = '#dist'
+
+options = Variables()
+options.Add( EnumVariable('platform',
+                          'Platform (compiler/stl) used to build the project',
+                          'msvc71',
+                          allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(),
+                          ignorecase=2) )
+
+try:
+    platform = ARGUMENTS['platform']
+    if platform == 'linux-gcc':
+        CXX = 'g++' # not quite right, but env is not yet available.
+        import commands
+        version = commands.getoutput('%s -dumpversion' %CXX)
+        platform = 'linux-gcc-%s' %version
+        print "Using platform '%s'" %platform
+        LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
+        LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
+        os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
+        print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
+except KeyError:
+    print 'You must specify a "platform"'
+    sys.exit(2)
+
+print "Building using PLATFORM =", platform
+
+rootbuild_dir = Dir('#buildscons')
+build_dir = os.path.join( '#buildscons', platform )
+bin_dir = os.path.join( '#bin', platform )
+lib_dir = os.path.join( '#libs', platform )
+sconsign_dir_path = Dir(build_dir).abspath
+sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' )
+
+# Ensure the build directory exists (SConsignFile fails otherwise!)
+if not os.path.exists( sconsign_dir_path ):
+    os.makedirs( sconsign_dir_path )
+
+# Store all dependency signatures in a database
+SConsignFile( sconsign_path )
+
+def make_environ_vars():
+    """Returns a dictionary with the environment variables to use when compiling."""
+    # PATH is required to find the compiler
+    # TEMP is required for at least mingw
+    vars = {}
+    for name in ('PATH', 'TEMP', 'TMP'):
+        if name in os.environ:
+            vars[name] = os.environ[name]
+    return vars
+
+
+env = Environment( ENV = make_environ_vars(),
+                   toolpath = ['scons-tools'],
+                   tools=[] ) #, tools=['default'] )
+
+if platform == 'suncc':
+    env.Tool( 'sunc++' )
+    env.Tool( 'sunlink' )
+    env.Tool( 'sunar' )
+    env.Append( CCFLAGS = ['-mt'] )
+elif platform == 'vacpp':
+    env.Tool( 'default' )
+    env.Tool( 'aixcc' )
+    env['CXX'] = 'xlC_r'   # scons does not pick up the correct one!
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. 
+default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. 
+ +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. 
Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. 
+ +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. 
+ +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. 
+ +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. 
+ +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. 
Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. 
+ +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. 
+ +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). 
+ +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. 
Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. 
This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. 
+ +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. 
+ +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. 
+ +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. 
+ +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. 
Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+   ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite JSON documents preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout << "Failed to parse configuration\n"
+              << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins', return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, to make the new configuration document:
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
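The configuration example above can be complemented with a strict-mode round trip. The sketch below is illustrative only: it relies on the Json::Features and Json::Reader declarations imported further down in this change (features.h and reader.h), and it assumes that Json::FastWriter::write() returns the serialized document as a std::string, since writer.h is not reproduced in this excerpt.

\code
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Strict mode: comments are forbidden and the root must be an array or an object.
   Json::Features features = Json::Features::strictMode();
   Json::Reader reader( features );

   const std::string document =
      "{ \"encoding\" : \"UTF-8\", \"indent\" : { \"length\" : 3 } }";

   Json::Value root;
   if ( !reader.parse( document, root ) )
   {
      // Report the parse errors collected by the reader.
      std::cerr << reader.getFormatedErrorMessages();
      return 1;
   }

   // Re-serialize compactly, without indentation or comments.
   // Assumption: FastWriter::write() returns the document as a std::string.
   Json::FastWriter writer;
   std::cout << writer.write( root );
   return 0;
}
\endcode

Because strict mode rejects comments and non-container roots, it is a sensible default when reading documents produced by other tools, while the permissive default (Features::all()) remains better suited to hand-edited configuration files.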
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox new file mode 100644 index 0000000..7f3aa1a --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox @@ -0,0 +1,32 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + - Release on sourceforge download + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - "Stream" based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py new file mode 100644 index 0000000..792bff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doxybuild.py @@ -0,0 +1,167 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(filename): + """find a program in folders path_lst, and sets env[var] + @param env: environmentA + @param filename: name of the program to search for + @param path_list: list of directories to search for filename + @param var: environment value to be checked for in env or os.environ + @return: either the value that is referenced with [var] in env or os.environ + or the first occurrence filename or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(os.path.join('doc', warning_log_path), 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
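+   *
+   * Illustrative sketch:
+   * \code
+   *   Json::Value v( 3 );
+   *   bool isInt = ( v.type() == Json::intValue );
+   * \endcode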
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
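+   *
+   * Illustrative sketch of typical member and element access (the keys used here
+   * are hypothetical):
+   * \code
+   *   Json::Value root( Json::objectValue );
+   *   root["name"] = "jsoncpp";
+   *   root["list"].append( 1 );        // "list" is created as an arrayValue
+   *   std::string name = root.get( "name", "unknown" ).asString();
+   *   Json::Value::Members members = root.getMemberNames();
+   *   Json::Int first = root["list"][0u].asInt();
+   * \endcode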
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
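+    /// A minimal sketch:
+    /// \code
+    /// Json::Value a( Json::arrayValue );
+    /// Json::Value b( Json::objectValue );
+    /// a.swap( b );   // a now holds the object value, b the array value
+    /// \endcode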
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
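+   *
+   * Lookup sketch (illustrative, not the exact code path): a key is hashed, reduced
+   * to a bucket index (for instance hash(key) % bucketsSize_), and the chain of
+   * ValueInternalLink pages anchored at that bucket is scanned until either the key
+   * or an available item slot is found.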
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
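+      // Illustrative note: with itemsPerPage == 8, element 13 is located at
+      // pages_[13 / 8][13 % 8], i.e. pages_[1][5]; the power-of-two page size lets
+      // the divide and modulo compile down to a shift and a mask.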
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
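+    *
+    * Illustrative usage sketch (writing to a std::ostringstream, as suggested above):
+    * \code
+    *   Json::Value root;
+    *   root["name"] = "jsoncpp";
+    *   std::ostringstream out;
+    *   Json::StyledStreamWriter writer( "  " );
+    *   writer.write( out, root );
+    * \endcode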
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..b928c62 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,178 @@ +"""Tag the sandbox for release, make source and doc tarballs. + +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' 
) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + if os.path.isdir( export_dir ): + shutil.rmtree( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' 
) + release_version = args[0] + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' + doxybuild.build_doc( options, make_release=True ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + #@todo: + # decompress source tarball + # ?compile & run & check + # ?upload documentation + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
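+
+        The builder this emitter serves is invoked like (illustrative sketch;
+        file names are placeholders):
+            env.SubstInFile( 'config.h', 'config.h.in',
+                             SUBST_DICT = {'%VERSION%': '1.2345'} )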
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
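+   // (Freed objects are chained through their own storage by release(), so
+   // popping the head here recycles a node without extra bookkeeping.)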
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
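+      // (Constructing this file-scope dummy during static initialization
+      // forces the function-local statics above to exist before any Value
+      // needs the array allocator.)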
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
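+      // Elements were placement-constructed into allocator-owned pages, so
+      // destroy them explicitly before the pages themselves are released.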
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
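+ * (DefaultValueMapAllocator below obtains links from a batch allocator and
+ * only memset()s them rather than running this constructor, so the all-zero
+ * state must behave like a default-constructed link.)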
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
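+ // Probe the up-to-itemPerLink slots of this link: the first available
+ // slot means the key is absent and a new item is created there, while an
+ // exact key match returns the existing item.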
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
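+// A minimal usage sketch of the reader implemented below (the "doc" and
+// "root" names are only illustrative):
+//
+//   Json::Reader reader( Json::Features::strictMode() );
+//   Json::Value root;
+//   std::string doc = "{ \"count\": 3 }";
+//   if ( !reader.parse( doc, root ) )
+//      std::cout << reader.getFormatedErrorMessages();
+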
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast<char *>( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast<char *>( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo investigate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast<char *>( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals...
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
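// Illustrative sketch only (not part of the imported sources): how the Value
// and Path APIs defined in this file are typically combined. The keys, values
// and function name below are hypothetical.
//
//   #include <json/json.h>
//
//   void pathSketch()
//   {
//      Json::Value root;
//      root["encoding"] = "UTF-8";              // Value::operator[]( const char * )
//      root["indent"]["length"] = 3;            // intermediate objects created on demand
//      root["plugins"].append( "plugin-a" );    // Value::append
//
//      Json::Path path( ".indent.length" );
//      Json::Value length = path.resolve( root, Json::Value( 4 ) ); // default when the path is absent
//      int n = length.asInt();                  // n == 3 here
//   }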
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid </ + // sequence. + // Should add a flag to allow this compatibility mode and prevent this + // sequence from occurring. + default: + if ( isControlCharacter( *c ) ) + { + std::ostringstream oss; + oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ?
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
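// Illustrative sketch only (not part of the imported sources): choosing between
// the three writers implemented in this file. The stream, value and function
// name below are hypothetical.
//
//   #include <json/json.h>
//   #include <iostream>
//
//   void writerSketch( const Json::Value &root )
//   {
//      Json::FastWriter fast;                          // compact, single line output
//      std::string compact = fast.write( root );
//
//      Json::StyledWriter styled;                      // human readable, returns a string
//      std::string pretty = styled.write( root );
//
//      Json::StyledStreamWriter streamWriter( "   " ); // human readable, writes to a stream
//      streamWriter.write( std::cout, root );
//   }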
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include <stdio.h> +#include <string> + +#if defined(_MSC_VER) +// Used to install a report hook that prevents dialogs on assertion and error. +# include <crtdbg.h> +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault.
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include <windows.h> +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext.
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? "true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast<unsigned int>( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caught:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? "FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque<TestResult> failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast<unsigned int>( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque<std::string> TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after displaying + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() causes the ReportHook to be called. + // The following is used to detect this case and lets the + // error handler fall back on its default behaviour + // (displaying a warning message). + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Lets other report types (_CRT_WARNING) be handled as they would be by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog.
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investigate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases are run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include <json/config.h> +# include <stdio.h> +# include <deque> +# include <string> + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion fails. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure.
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targeted at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque<Failure> Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs tests as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test cases in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque<TestCaseFactory> Factories; + Factories tests_; + }; + + template <typename T> + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true.
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equal. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equal. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overridden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include <json/json.h> +#include "jsontest.h" + + +// TODO: +// - boolean values return that they are integral. Should not be. +// - unsigned integers in integer range are not considered to be valid integers. Should check range.
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected
new file mode 100644
index 0000000..5e96113
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected
@@ -0,0 +1,4 @@
+.={}
+.attribute="random"
+.count=1234
+.name="test"
diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json
new file mode 100644
index 0000000..4fcd4d8
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json
@@ -0,0 +1,5 @@
+{
+   "count" : 1234,
+   "name" : "test",
+   "attribute" : "random"
+}
diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected
new file mode 100644
index 0000000..812965b
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected
@@ -0,0 +1,2 @@
+.={}
+.=1234
diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json
new file mode 100644
index 0000000..450762d
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json
@@ -0,0 +1,3 @@
+{
+   "" : 1234
+}
diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected
new file mode 100644
index 0000000..8d88041
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected
@@ -0,0 +1,3 @@
+.={}
+.first=1
+.second=2
diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json
new file mode 100644
index 0000000..fabd55d
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json
@@ -0,0 +1,14 @@
+/* A comment
+   at the beginning of the file.
+ */
+{
+   "first" : 1,  // comment after 'first' on the same line
+
+/* Comment before 'second'
+ */
+   "second" : 2
+}
+
+/* A comment at
+   the end of the file.
+ */
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected
new file mode 100644
index 0000000..ae23572
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected
@@ -0,0 +1,2 @@
+.=8589934592
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json
new file mode 100644
index 0000000..358452d
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json
@@ -0,0 +1,3 @@
+// 2^33 => out of integer range, switch to double
+8589934592
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected
new file mode 100644
index 0000000..df8de42
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected
@@ -0,0 +1,2 @@
+.=-4294967295
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json
new file mode 100644
index 0000000..936c706
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json
@@ -0,0 +1,3 @@
+// -2^32 => out of signed integer range, switch to double
+-4294967295
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected
new file mode 100644
index 0000000..df8de42
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected
@@ -0,0 +1,2 @@
+.=-4294967295
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json
new file mode 100644
index 0000000..936c706
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json
@@ -0,0 +1,3 @@
+// -2^32 => out of signed integer range, switch to double
+-4294967295
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected
new file mode 100644
index 0000000..d726abe
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected
@@ -0,0 +1,2 @@
+.=1.2345678
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json
new file mode 100644
index 0000000..a8eb6d0
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json
@@ -0,0 +1,3 @@
+// 1.2345678
+12345678e-7
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected
new file mode 100644
index 0000000..949fd8f
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected
@@ -0,0 +1,3 @@
+.=1234567.8
+
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json
new file mode 100644
index 0000000..f7923ba
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json
@@ -0,0 +1,3 @@
+// 1234567.8
+0.12345678e7
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected
new file mode 100644
index 0000000..03b7d7f
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected
@@ -0,0 +1,3 @@
+.=-1.2345678
+
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json
new file mode 100644
index 0000000..485419a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json
@@ -0,0 +1,3 @@
+// -1.2345678
+-12345678e-7
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected
new file mode 100644
index 0000000..12025a4
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected
@@ -0,0
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json
@@ -0,0 +1 @@
+"\u20AC"
\ No newline at end of file
diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected
new file mode 100644
index 0000000..868fbc3
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected
@@ -0,0 +1 @@
+.="𝄞"
diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json
new file mode 100644
index 0000000..dae65c5
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json
@@ -0,0 +1 @@
+"\uD834\uDD1E"
\ No newline at end of file
diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected
new file mode 100644
index 0000000..19b2c40
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected
@@ -0,0 +1,2 @@
+.="Zażółć gęślą jaźń"
+
diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json
new file mode 100644
index 0000000..8770410
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json
@@ -0,0 +1 @@
+"Zażółć gęślą jaźń"
\ No newline at end of file
diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py
new file mode 100644
index 0000000..5b215c4
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/generate_expected.py
@@ -0,0 +1,11 @@
+import glob
+import os.path
+for path in glob.glob( '*.json' ):
+    text = file(path,'rt').read()
+    target = os.path.splitext(path)[0] + '.expected'
+    if os.path.exists( target ):
+        print 'skipping:', target
+    else:
+        print 'creating:', target
+        file(target,'wt').write(text)
+
diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json
new file mode 100644
index 0000000..6216b86
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json
@@ -0,0 +1 @@
+"A JSON payload should be an object or array, not a string."
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 51615acaf1c386d86d9cf0847d5d1c081898a050 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 23 Feb 2010 21:00:30 +0000 Subject: [PATCH 113/268] - added the following step to make_release: fix EOL in distribution source, generate source tarball. - devtools/ was made into a python module and common utilities are being moved in this module git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@113 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/devtools/__init__.py | 1 + trunk/jsoncpp/devtools/fixeol.py | 63 ++++++++ trunk/jsoncpp/devtools/tarball.py | 41 ++++++ trunk/jsoncpp/devtools/wscript | 225 ----------------------------- trunk/jsoncpp/doxybuild.py | 44 +----- trunk/jsoncpp/makerelease.py | 50 +++++-- trunk/jsoncpp/version | 2 +- 7 files changed, 146 insertions(+), 280 deletions(-) create mode 100644 trunk/jsoncpp/devtools/__init__.py create mode 100644 trunk/jsoncpp/devtools/fixeol.py create mode 100644 trunk/jsoncpp/devtools/tarball.py delete mode 100644 trunk/jsoncpp/devtools/wscript diff --git a/trunk/jsoncpp/devtools/__init__.py b/trunk/jsoncpp/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/trunk/jsoncpp/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/trunk/jsoncpp/devtools/fixeol.py b/trunk/jsoncpp/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/trunk/jsoncpp/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = 
UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/trunk/jsoncpp/devtools/tarball.py b/trunk/jsoncpp/devtools/tarball.py new file mode 100644 index 0000000..2ce261a --- /dev/null +++ b/trunk/jsoncpp/devtools/tarball.py @@ -0,0 +1,41 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + tar.close() diff --git a/trunk/jsoncpp/devtools/wscript b/trunk/jsoncpp/devtools/wscript deleted file mode 100644 index 61b5183..0000000 --- a/trunk/jsoncpp/devtools/wscript +++ /dev/null @@ -1,225 +0,0 @@ -VERSION='0.1.0' -APPNAME='CppUnit2' -srcdir = '.' -blddir = 'build' - -import Options -import Logs -import UnitTest -import Utils -import os.path -import sys -import glob - -CPPUT_EXAMPLES = ''' - checking_assertions - ignore_failure_demo - input_test - light_fixture - log_demo - parametrized_test - stringize_demo - test_function - '''.split() - -BROKEN_CPPUT_EXAMPLES = ''' - input_based_test - opentest_demo - table_fixture - '''.split() - -def _get_example_dirs(): - return [ os.path.join( 'examples', d ) - for d in CPPUT_EXAMPLES ] - -def _get_main_script_dir(): - """Gets the path of the directory containing this script.""" - # The main script path is only valid once the it has been executed, hence this can not be a global var. 
- assert Utils.g_module is not None - return os.path.split( Utils.g_module.root_path )[0] - -def _fix_import_path(): - """Adds the main script directory to be able to import waftools modules.""" - import_dir = _get_main_script_dir() - if import_dir not in sys.path: - sys.path.append( import_dir ) - -def _get_tool_dir(): - return os.path.join( main_script_dir, 'waftools' ) - -def set_options(opt): - """Always called first during the build.""" - _fix_import_path() - import waftools.log_output - waftools.log_output.set_options( opt ) - - # Adds command-line options for compiler - opt.tool_options('compiler_cxx') - - # from compiler_cxx tools, set_options - import Tools.ccroot as ccroot - opt.add_option('-d', '--debug-level', - action = 'store', - default = ccroot.DEBUG_LEVELS.RELEASE, - help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s'] " % "', '".join(ccroot.DEBUG_LEVELS.ALL) + - "[default: %default]", - choices = ccroot.DEBUG_LEVELS.ALL, - dest = 'debug_level') - -def init(): - """Called set_options() once the command-line has been parsed. - Command-line options value are accessed through Options.options. - """ - import waftools.log_output - waftools.log_output.init() - - -def configure(conf): - # There is a link issue with msvc 9! - conf.env['MSVC_VERSIONS'] = ['msvc 8.0'] - - # CXX=g++-3.0 ./waf.py configure will use g++-3.0 instead of 'g++' - conf.check_tool('compiler_cxx') - - # Select debug/optimize flags - debug_level = Options.options.debug_level.upper() - conf.env.append_unique('CXXFLAGS', conf.env['CXXFLAGS_' + debug_level]) - - compiler = conf.env['COMPILER_CXX'] - if compiler == 'msvc': # Microsoft Visual Studio specifics - # Select run-time library variant - if 'DEBUG' in debug_level: - crt_variant = 'MULTITHREADED_DLL_DBG' - else: - crt_variant = 'MULTITHREADED_DLL' - # MULTITHREADED, MULTITHREADED_DLL, MULTITHREADED_DBG, MULTITHREADED_DLL_DBG - conf.env.append_unique('CPPFLAGS', conf.env['CPPFLAGS_CRT_' + crt_variant]) - conf.env.append_unique('CPPDEFINES', conf.env['CPPDEFINES_CRT_' + crt_variant]) - - ## batched builds can be enabled by including the module optim_cc - # conf.check_tool('batched_cc') - - -# WAF command: - -def build(bld): - # process subfolders from here - bld.add_subdirs('''src/cpptl - src/cpput - src/cpputtest''') - - bld.add_subdirs( _get_example_dirs() ) - -def gen_examples_wscript(ctx): - for example_dir in _get_example_dirs(): - wscript_path = os.path.join( example_dir, 'wscript_build' ) - sources = glob.glob( os.path.join( example_dir, '*.cpp' ) ) - Logs.info( 'Generating "%s"' % wscript_path ) - open( wscript_path, 'wb' ).write( """\ -#! /usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -bld.new_task_gen( - features = 'cxx cprogram', - source = '''%(sources)s''', - includes = '../.. 
../../include', # for examples/common - uselib_local = 'cpptl cpput', - name = 'example_%(name)s', - target = 'example_%(name)s' ) -""" % { - 'sources': ' '.join( [os.path.basename(s) for s in sources] ), - 'name': os.path.basename( example_dir ) - } ) - -def _fix_python_source( path, is_dry_run = True, verbose = True ): - """Makes sure that all sources have unix EOL and replace tabs with 4 spaces.""" - from waftools import reindent - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - - if verbose: - print '%s =>' % path, - try: - r = reindent.Reindenter(f) - finally: - f.close() - if r.run(): # File need to be fixed ? - if not is_dry_run: - f = open(path, "wb") - try: - r.write(f) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - elif verbose: - print ' OK' - return True - -def _fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True - - - -def _do_fix( is_dry_run = True ): - from waftools import antglob - python_sources = antglob.glob( '.', - includes = '**/*.py **/wscript **/wscript_build', - excludes = antglob.default_excludes + './waf.py', - prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) - for path in python_sources: - _fix_python_source( path, is_dry_run ) - - cpp_sources = antglob.glob( '.', - includes = '**/*.cpp **/*.h **/*.inl', - prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) - for path in cpp_sources: - _fix_source_eol( path, is_dry_run ) - - -def dry_fix(context): - _do_fix( is_dry_run = True ) - -def fix(context): - _do_fix( is_dry_run = False ) - -def shutdown(): - pass - -def check(context): - # Unit tests are run when "check" target is used - ut = UnitTest.unit_test() - ut.change_to_testfile_dir = True - ut.want_to_see_test_output = True - ut.want_to_see_test_error = True - ut.run() - ut.print_results() diff --git a/trunk/jsoncpp/doxybuild.py b/trunk/jsoncpp/doxybuild.py index 82bdea6..792bff7 100644 --- a/trunk/jsoncpp/doxybuild.py +++ b/trunk/jsoncpp/doxybuild.py @@ -6,47 +6,7 @@ import os.path import sys import shutil -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. 
- """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - tar.close() - +from devtools import tarball def find_program(filename): """find a program in folders path_lst, and sets env[var] @@ -171,7 +131,7 @@ def yesno( bool ): 'version' ] tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) def main(): usage = """%prog diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index c00062a..b928c62 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -15,6 +15,7 @@ import subprocess import xml.etree.ElementTree as ElementTree import shutil +from devtools import antglob, fixeol, tarball SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' @@ -92,6 +93,24 @@ def svn_export( tag_url, export_dir ): shutil.rmtree( export_dir ) svn_command( 'export', tag_url, export_dir ) +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + def main(): usage = """%prog release_version next_dev_version Update 'version' file to release_version and commit. @@ -129,19 +148,26 @@ def main(): print 'Setting version to', release_version set_version( release_version ) tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) -## if svn_check_if_tag_exist( tag_url ): -## if options.retag_release: -## svn_remove_tag( tag_url, 'Overwriting previous tag' ) -## else: -## print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url -## sys.exit( 1 ) -## svn_tag_sandbox( tag_url, 'Release ' + release_version ) -## print 'Generated doxygen document...' 
-## doxybuild.build_doc( options, make_release=True ) - svn_export( tag_url, 'dist/distcheck' ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' + doxybuild.build_doc( options, make_release=True ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) #@todo: - # fix-eol - # source tarball # decompress source tarball # ?compile & run & check # ?upload documentation diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version index ab67193..79a2734 100644 --- a/trunk/jsoncpp/version +++ b/trunk/jsoncpp/version @@ -1 +1 @@ -0.5.0-rc \ No newline at end of file +0.5.0 \ No newline at end of file From 4f1fd71aa9c2de3bc085495a7923826aab5f6cf8 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 07:35:41 +0000 Subject: [PATCH 114/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@114 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 167 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 178 -- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - 
tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - 
.../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 170 files changed, 14277 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 
tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 
--- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. 
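For instance (a made-up test shown only for illustration), an input file
TESTNAME.json containing {"count": 3, "items": ["a"]} would be paired with
a TESTNAME.expected file holding:
.={}
.count=3
.items=[]
.items[0]="a"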
-See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) 
-if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. 
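# Example of the resulting name (derived from the defaults set below): with
# platform=msvc71 the suffix expands to 'vc71_libmt', so the json library is
# built as 'json_vc71_libmt', which is the form the autolink naming
# convention mentioned below expects.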
-env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' 
% sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. 
-# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. 
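[Editor's note] Every %NAME% token in this doxyfile template (e.g. %JSONCPP_VERSION%, %TOPDIR%, %DOC_TOPDIR%, %HTML_OUTPUT%, %WARNING_LOG_PATH%, %HAVE_DOT%) is a placeholder that the build is expected to substitute before doxygen is run; the substitution step itself is not part of this hunk. A minimal, hypothetical sketch of such an expansion pass, with example values only:

    import re

    def expand_doxyfile(template_path, output_path, values):
        """Replace %NAME% tokens in the template; unknown tokens are left untouched."""
        with open(template_path) as template:
            text = template.read()
        expanded = re.sub(r'%([A-Z_]+)%',
                          lambda m: values.get(m.group(1), m.group(0)),
                          text)
        with open(output_path, 'w') as out:
            out.write(expanded)

    # Example values; the real ones would come from the build configuration.
    expand_doxyfile('doc/doxyfile.in', 'doc/doxyfile', {
        'JSONCPP_VERSION': '0.5.0',
        'TOPDIR': '..',
        'DOC_TOPDIR': 'dist/doxygen',
        'HTML_OUTPUT': 'html',
        'HTML_HELP': 'NO',
        'HAVE_DOT': 'NO',
        'WARNING_LOG_PATH': 'doxygen-warnings.txt',
    })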
- -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. 
Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. 
- -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. 
- -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. 
- -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. 
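[Editor's note] Because WARN_FORMAT below is "$file:$line: $text" and WARN_LOGFILE points at the substituted %WARNING_LOG_PATH% file, the warning log has a mechanically parseable shape. A hypothetical post-build check (not part of the repository) could echo the warnings and fail the run when the log is non-empty:

    import re
    import sys

    # One warning per line, in the form path:number: message (per WARN_FORMAT).
    WARNING_RE = re.compile(r'^(?P<file>.+?):(?P<line>\d+): (?P<text>.*)$')

    def report_warnings(log_path):
        count = 0
        with open(log_path) as log:
            for raw in log:
                match = WARNING_RE.match(raw.rstrip('\n'))
                if match:
                    count += 1
                    print('%s:%s: %s' % (match.group('file'),
                                         match.group('line'),
                                         match.group('text')))
        return count

    if __name__ == '__main__':
        sys.exit(1 if report_warnings(sys.argv[1]) else 0)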
- -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. 
Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. 
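[Editor's note] INPUT_FILTER is left empty above, but the surrounding comments spell out the contract: doxygen invokes the filter via popen() with one input file name as argument and uses whatever the filter writes to standard output in place of the original file contents. A minimal, hypothetical filter honouring that contract (it merely strips trailing whitespace):

    #!/usr/bin/env python
    # Hypothetical doxygen INPUT_FILTER: read the file named on the command
    # line and write the lightly transformed text to stdout.
    import sys

    def main():
        with open(sys.argv[1]) as source:
            for line in source:
                sys.stdout.write(line.rstrip() + '\n')

    if __name__ == '__main__':
        main()

It would be wired in with something like INPUT_FILTER = "python strip_trailing_ws.py" (the script name is invented for this example); FILTER_PATTERNS applies the same mechanism per file pattern.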
- -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. 
- -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). 
- -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. 
Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. 
This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. 
- -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. 
- -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. 
- -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. 
- -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. 
Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
-<!-- footer.html (HTML markup stripped in this export); recoverable text: -->
- SourceForge Logo
- hosts this site.
- Send comments to:
- Json-cpp Developers
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html
deleted file mode 100644
index d56ea59..0000000
--- a/tags/jsoncpp/0.5.0/doc/header.html
+++ /dev/null
@@ -1,24 +0,0 @@
-<!-- header.html (HTML markup stripped in this export); recoverable text: -->
-JsonCpp - JSON data format manipulation library
- JsonCpp project page
- JsonCpp home page
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integers, real numbers, strings, an ordered sequence of values, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" : true } -} -\endverbatim - -\section _features Features -- read and write JSON documents -- rewrite a JSON document preserving the original comments - -\code -Json::Value root; // will contain the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and its location in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'plug-ins', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown, make the new configuration document: -// Since Json::Value has implicit constructors for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _pbuild Build instructions -The build instructions are located in the file -README.txt in the top directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org).
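A minimal companion sketch to the mainpage above (assuming the Json::FastWriter and Json::StyledWriter classes declared by the library and the <json/json.h> umbrella include that appears later in this patch): FastWriter emits the same root value in compact, single-line form and does not write comments, while StyledWriter produces the indented, comment-preserving output used in the example.

\code
#include <json/json.h>
#include <iostream>

void emitConfig( const Json::Value &root )
{
   Json::FastWriter fast;      // compact single-line output; comments are not written
   Json::StyledWriter styled;  // human-readable output; preserves comments attached to values
   std::cout << fast.write( root );    // e.g. for logs or network transfer
   std::cout << styled.write( root );  // e.g. for a configuration file written back to disk
}
\endcode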
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 792bff7..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(filename): - """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(os.path.join('doc', warning_log_path), 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
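 * A minimal strict-mode sketch (assuming only the Reader and Features declarations in this patch, a std::string named doc holding the UTF-8 document, and <iostream> for the error report):
 * \code
 * Json::Reader reader( Json::Features::strictMode() );
 * Json::Value root;
 * if ( !reader.parse( doc, root, false ) )   // strict mode: comments are not allowed anyway
 *    std::cerr << reader.getFormatedErrorMessages();
 * \endcode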
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only makes sense for the root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructors and objectValue member assignment take advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represent: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (JavaScript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * Values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non-const methods will automatically create a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resized and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtain a default value in the case the required element - * does not exist. - * - * It is possible to iterate over the members of an #objectValue using - * the getMemberNames() method.
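 * A short sketch of that member iteration (assuming a parsed Value named root of type #objectValue and <iostream> for output):
 * \code
 * const Json::Value::Members names = root.getMemberNames();
 * for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
 *    std::cout << *it << " = " << root[*it].toStyledString();
 * \endcode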
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
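 /// A small usage sketch (hypothetical values):
 /// \code
 /// Json::Value a = "first";
 /// Json::Value b = "second";
 /// a.swap( b );
 /// \endcode
 /// Afterwards \c a holds "second" and \c b holds "first", while comments attached to either value stay where they were.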
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
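 // A worked example of the look-up formula described above (illustrative comment only, not part of the class):
 //   pageIndex = 19 / itemsPerPage;        // == 2 when itemsPerPage == 8
 //   slot      = 19 % itemsPerPage;        // == 3
 //   element   = pages_[pageIndex][slot];  // i.e. item 19 is fetched as pages_[2][3];
 //   appending item 24 starts a new page, since 24 / 8 == 3 is past the last used page.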
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- *                       [output] pointer to the new index of at least
- *                                \a minNewIndexCount pages.
- * \param indexCount [input] current number of pages in the index.
- *                   [output] number of pages the reallocated index can handle.
- *                            \b MUST be >= \a minNewIndexCount.
- * \param minNewIndexCount Minimum number of pages the new index must be able to
- *                         handle.
- */
-      virtual void reallocateArrayPageIndex( Value **&indexes,
-                                             ValueInternalArray::PageIndex &indexCount,
-                                             ValueInternalArray::PageIndex minNewIndexCount ) = 0;
-      virtual void releaseArrayPageIndex( Value **indexes,
-                                          ValueInternalArray::PageIndex indexCount ) = 0;
-      virtual Value *allocateArrayPage() = 0;
-      virtual void releaseArrayPage( Value *value ) = 0;
-   };
-#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
-
-
-   /** \brief Experimental and untested: base class for Value iterators.
-    *
-    */
-   class ValueIteratorBase
-   {
-   public:
-      typedef unsigned int size_t;
-      typedef int difference_type;
-      typedef ValueIteratorBase SelfType;
-
-      ValueIteratorBase();
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      explicit ValueIteratorBase( const Value::ObjectValues::iterator &current );
-#else
-      ValueIteratorBase( const ValueInternalArray::IteratorState &state );
-      ValueIteratorBase( const ValueInternalMap::IteratorState &state );
-#endif
-
-      bool operator ==( const SelfType &other ) const
-      {
-         return isEqual( other );
-      }
-
-      bool operator !=( const SelfType &other ) const
-      {
-         return !isEqual( other );
-      }
-
-      difference_type operator -( const SelfType &other ) const
-      {
-         return computeDistance( other );
-      }
-
-      /// Return either the index or the member name of the referenced value as a Value.
-      Value key() const;
-
-      /// Return the index of the referenced Value. -1 if it is not an arrayValue.
-      UInt index() const;
-
-      /// Return the member name of the referenced Value. "" if it is not an objectValue.
-      const char *memberName() const;
-
-   protected:
-      Value &deref() const;
-
-      void increment();
-
-      void decrement();
-
-      difference_type computeDistance( const SelfType &other ) const;
-
-      bool isEqual( const SelfType &other ) const;
-
-      void copy( const SelfType &other );
-
-   private:
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      Value::ObjectValues::iterator current_;
-      // Indicates that iterator is for a null value.
-      bool isNull_;
-#else
-      union
-      {
-         ValueInternalArray::IteratorState array_;
-         ValueInternalMap::IteratorState map_;
-      } iterator_;
-      bool isArray_;
-#endif
-   };
-
-   /** \brief Experimental and untested: const iterator for object and array value.
-    *
-    */
-   class ValueConstIterator : public ValueIteratorBase
-   {
-      friend class Value;
-   public:
-      typedef unsigned int size_t;
-      typedef int difference_type;
-      typedef const Value &reference;
-      typedef const Value *pointer;
-      typedef ValueConstIterator SelfType;
-
-      ValueConstIterator();
-   private:
-      /*! \internal Used by Value to create an iterator.
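-       *
-       * Illustrative usage sketch, added as commentary and not part of the
-       * original header. It assumes the Value::const_iterator typedef and the
-       * begin()/end() members declared earlier in this file, plus <iostream>:
-       * \code
-       * Json::Value root;   // an objectValue populated elsewhere
-       * for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
-       *    std::cout << it.key().asString() << " : "
-       *              << (*it).toStyledString() << std::endl;
-       * \endcode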
-       */
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      explicit ValueConstIterator( const Value::ObjectValues::iterator &current );
-#else
-      ValueConstIterator( const ValueInternalArray::IteratorState &state );
-      ValueConstIterator( const ValueInternalMap::IteratorState &state );
-#endif
-   public:
-      SelfType &operator =( const ValueIteratorBase &other );
-
-      SelfType operator++( int )
-      {
-         SelfType temp( *this );
-         ++*this;
-         return temp;
-      }
-
-      SelfType operator--( int )
-      {
-         SelfType temp( *this );
-         --*this;
-         return temp;
-      }
-
-      SelfType &operator--()
-      {
-         decrement();
-         return *this;
-      }
-
-      SelfType &operator++()
-      {
-         increment();
-         return *this;
-      }
-
-      reference operator *() const
-      {
-         return deref();
-      }
-   };
-
-
-   /** \brief Experimental and untested: iterator for object and array value.
-    */
-   class ValueIterator : public ValueIteratorBase
-   {
-      friend class Value;
-   public:
-      typedef unsigned int size_t;
-      typedef int difference_type;
-      typedef Value &reference;
-      typedef Value *pointer;
-      typedef ValueIterator SelfType;
-
-      ValueIterator();
-      ValueIterator( const ValueConstIterator &other );
-      ValueIterator( const ValueIterator &other );
-   private:
-      /*! \internal Used by Value to create an iterator.
-       */
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      explicit ValueIterator( const Value::ObjectValues::iterator &current );
-#else
-      ValueIterator( const ValueInternalArray::IteratorState &state );
-      ValueIterator( const ValueInternalMap::IteratorState &state );
-#endif
-   public:
-
-      SelfType &operator =( const SelfType &other );
-
-      SelfType operator++( int )
-      {
-         SelfType temp( *this );
-         ++*this;
-         return temp;
-      }
-
-      SelfType operator--( int )
-      {
-         SelfType temp( *this );
-         --*this;
-         return temp;
-      }
-
-      SelfType &operator--()
-      {
-         decrement();
-         return *this;
-      }
-
-      SelfType &operator++()
-      {
-         increment();
-         return *this;
-      }
-
-      reference operator *() const
-      {
-         return deref();
-      }
-   };
-
-
-} // namespace Json
-
-
-#endif // CPPTL_JSON_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h
deleted file mode 100644
index 5f4b83b..0000000
--- a/tags/jsoncpp/0.5.0/include/json/writer.h
+++ /dev/null
@@ -1,174 +0,0 @@
-#ifndef JSON_WRITER_H_INCLUDED
-# define JSON_WRITER_H_INCLUDED
-
-# include "value.h"
-# include <vector>
-# include <string>
-# include <iostream>
-
-namespace Json {
-
-   class Value;
-
-   /** \brief Abstract class for writers.
-    */
-   class JSON_API Writer
-   {
-   public:
-      virtual ~Writer();
-
-      virtual std::string write( const Value &root ) = 0;
-   };
-
-   /** \brief Outputs a Value in JSON format without formatting (not human friendly).
-    *
-    * The JSON document is written in a single line. It is not intended for 'human' consumption,
-    * but may be useful to support features such as RPC where bandwidth is limited.
-    * \sa Reader, Value
-    */
-   class JSON_API FastWriter : public Writer
-   {
-   public:
-      FastWriter();
-      virtual ~FastWriter(){}
-
-      void enableYAMLCompatibility();
-
-   public: // overridden from Writer
-      virtual std::string write( const Value &root );
-
-   private:
-      void writeValue( const Value &value );
-
-      std::string document_;
-      bool yamlCompatiblityEnabled_;
-   };
-
-   /** \brief Writes a Value in JSON format in a human friendly way.
-    *
-    * The rules for line breaks and indentation are as follows:
-    * - Object value:
-    *     - if empty then print {} without indent and line break
-    *     - if not empty then print '{', line break & indent, print one value per line
-    *       and then unindent and line break and print '}'.
-    * - Array value:
-    *     - if empty then print [] without indent and line break
-    *     - if the array contains no object value, empty array or some other value types,
-    *       and all the values fit on one line, then print the array on a single line.
-    *     - otherwise, if the values do not fit on one line, or the array contains
-    *       an object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments then they are output according to their #CommentPlacement.
-    *
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledWriter: public Writer
-   {
-   public:
-      StyledWriter();
-      virtual ~StyledWriter(){}
-
-   public: // overridden from Writer
-      /** \brief Serialize a Value in JSON format.
-       * \param root Value to serialize.
-       * \return String containing the JSON document that represents the root value.
-       */
-      virtual std::string write( const Value &root );
-
-   private:
-      void writeValue( const Value &value );
-      void writeArrayValue( const Value &value );
-      bool isMultineArray( const Value &value );
-      void pushValue( const std::string &value );
-      void writeIndent();
-      void writeWithIndent( const std::string &value );
-      void indent();
-      void unindent();
-      void writeCommentBeforeValue( const Value &root );
-      void writeCommentAfterValueOnSameLine( const Value &root );
-      bool hasCommentForValue( const Value &value );
-      static std::string normalizeEOL( const std::string &text );
-
-      typedef std::vector<std::string> ChildValues;
-
-      ChildValues childValues_;
-      std::string document_;
-      std::string indentString_;
-      int rightMargin_;
-      int indentSize_;
-      bool addChildValues_;
-   };
-
-   /** \brief Writes a Value in JSON format in a human friendly way,
-    *  to a stream rather than to a string.
-    *
-    * The rules for line breaks and indentation are as follows:
-    * - Object value:
-    *     - if empty then print {} without indent and line break
-    *     - if not empty then print '{', line break & indent, print one value per line
-    *       and then unindent and line break and print '}'.
-    * - Array value:
-    *     - if empty then print [] without indent and line break
-    *     - if the array contains no object value, empty array or some other value types,
-    *       and all the values fit on one line, then print the array on a single line.
-    *     - otherwise, if the values do not fit on one line, or the array contains
-    *       an object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments then they are output according to their #CommentPlacement.
-    *
-    * \param indentation Each level will be indented by this amount extra.
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledStreamWriter
-   {
-   public:
-      StyledStreamWriter( std::string indentation="\t" );
-      ~StyledStreamWriter(){}
-
-   public:
-      /** \brief Serialize a Value in JSON format.
-       * \param out Stream to write to. (Can be ostringstream, e.g.)
-       * \param root Value to serialize.
-       * \note There is no point in deriving from Writer, since write() should not return a value.
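-       *
-       * Illustrative usage (added as commentary, not part of the original header;
-       * the two-space indentation string and std::cout are arbitrary choices):
-       * \code
-       * Json::Value root;                          // populated elsewhere
-       * Json::StyledStreamWriter writer( "  " );   // indent two spaces per level
-       * writer.write( std::cout, root );           // any std::ostream will do
-       * \endcode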
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index b928c62..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,178 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. - -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' 
) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - if os.path.isdir( export_dir ): - shutil.rmtree( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. [Default: %default]""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' 
) - release_version = args[0] - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' - doxybuild.build_doc( options, make_release=True ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - #@todo: - # decompress source tarball - # ?compile & run & check - # ?upload documentation - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
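-      // Added commentary (not in the original source): the free list threads its
-      // 'next' pointer through the released objects' own storage (see release()
-      // below), so popping a node is just reading that embedded pointer; no extra
-      // bookkeeping memory is required.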
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
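-      // Added commentary (not in the original source): this dummy namespace-scope
-      // object exists only for the constructor call above; constructing it during
-      // static initialization is a common idiom to force function-local statics
-      // into existence before main() rather than on first use.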
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
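-      // Added commentary (not in the original source): items were constructed with
-      // placement new inside raw pages obtained from the allocator, so each one
-      // needs this explicit destructor call before its page is released below.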
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
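For orientation, the additive character hash that ValueInternalMap::hash() implements a little further down, pulled out into a standalone toy so the bucket selection used by find() and resolveReference() can be traced by hand. Note that reserve() above only ever allocates a single bucket (the ideal bucket count computation is commented out), so every lookup walks one link chain.

#include <cstdio>

typedef unsigned int HashKey;

// Mirrors ValueInternalMap::hash(): a plain additive hash over the key characters.
static HashKey toyHash( const char *key )
{
   HashKey hash = 0;
   while ( *key )
      hash += *key++ * 37;
   return hash;
}

int main()
{
   const HashKey h = toyHash( "price" );
   const unsigned int bucketsSize = 1;     // what reserve() actually sets up
   std::printf( "hash(\"price\") = %u, bucket = %u\n", h, h % bucketsSize );
   return 0;
}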
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
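As a concrete check of codePointToUTF8() above, the euro sign U+20AC falls into the three-byte branch and should come out as E2 82 AC; the sketch below simply repeats that branch's bit arithmetic on this one value.

#include <cstdio>

int main()
{
   const unsigned int cp = 0x20AC;                          // U+20AC, i.e. cp <= 0xFFFF
   const unsigned int b0 = 0xE0 | ( 0xf  & ( cp >> 12 ) );  // 0xE2
   const unsigned int b1 = 0x80 | ( 0x3f & ( cp >> 6 ) );   // 0x82
   const unsigned int b2 = 0x80 | ( 0x3f & cp );            // 0xAC
   std::printf( "U+%04X -> %02X %02X %02X\n", cp, b0, b1, b2 );
   return 0;
}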
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
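The surrogate-pair recombination performed by decodeUnicodeCodePoint() above can be verified by hand; for instance the escaped pair \uD834\uDD1E should decode to U+1D11E (MUSICAL SYMBOL G CLEF), a code point outside the BMP.

#include <cassert>
#include <cstdio>

int main()
{
   const unsigned int high = 0xD834;   // value of the first \u escape (high surrogate)
   const unsigned int low  = 0xDD1E;   // value of the second \u escape (low surrogate)
   const unsigned int unicode = 0x10000 + ( ( high & 0x3FF ) << 10 ) + ( low & 0x3FF );
   assert( unicode == 0x1D11E );
   std::printf( "decoded code point: U+%X\n", unicode );
   return 0;
}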
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
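A minimal end-to-end sketch of the parse-and-report cycle implemented above, using only names that appear in this patch (Json::Reader::parse(), getFormatedErrorMessages() and the Value accessors); json/json.h is the umbrella header shipped alongside this source.

#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   const std::string doc = "{ \"name\": \"jsoncpp\", \"tests\": [ 1, 2, 3 ] }";
   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( doc, root, false ) )          // collectComments = false
   {
      std::cerr << reader.getFormatedErrorMessages();
      return 1;
   }
   std::cout << root["name"].asString() << " has "
             << root["tests"].size() << " array elements\n";
   return 0;
}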
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
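A few spot checks of the conversion rules spelled out in asInt(), asBool() and isConvertibleTo() above, with inputs chosen to stay inside the asserted ranges:

#include <json/json.h>
#include <cassert>

int main()
{
   Json::Value real( 3.9 );
   assert( real.asInt() == 3 );       // realValue: Int( value_.real_ ) truncates
   assert( real.asBool() == true );   // realValue: real_ != 0.0

   Json::Value zero( 0 );
   assert( zero.asBool() == false );                      // intValue: int_ != 0
   assert( zero.isConvertibleTo( Json::uintValue ) );     // non-negative int fits a uint
   return 0;
}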
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
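Path and PathArgument above implement a small query language: makePath() splits the expression on '.', treats "[n]" as an array index, and substitutes '%' or "[%]" with the PathArgument values passed to the constructor. A sketch of the intended use, illustrative only; it assumes the extra PathArgument parameters of the Path constructor have defaults in json_value.h, and the member names are made up:

#include <json/json.h>

void pathSketch( const Json::Value &root )
{
   // Suppose root["settings"]["indent"]["length"] holds 3.
   Json::Path path( ".settings.indent.length" );
   const Json::Value &length = path.resolve( root );   // Value::null if the member is missing

   // The second resolve() overload, just below, substitutes a default value instead:
   Json::Value margin = Json::Path( ".settings.margin" ).resolve( root, 0 );
   (void)length; (void)margin;
}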
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
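Together with Value::toStyledString() earlier in this patch, these are the three write paths: FastWriter emits a single compact line, StyledWriter builds an indented string and re-emits comments, and StyledStreamWriter produces the same layout directly on a std::ostream, which is also what the operator<< defined further down uses. A usage sketch, illustrative only and not part of the original sources:

#include <json/json.h>
#include <iostream>

void writerSketch( const Json::Value &root )
{
   Json::FastWriter fast;                           // compact, one line
   std::string compact = fast.write( root );

   Json::StyledWriter styled;                       // indented, keeps comments
   std::string pretty = styled.write( root );

   Json::StyledStreamWriter streamWriter( "  " );   // two-space indentation
   streamWriter.write( std::cout, root );

   std::cout << root;     // equivalent: operator<< wraps a StyledStreamWriter
   (void)compact; (void)pretty;
}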
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
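addFailure() records one Failure for the failed assertion plus one entry per enclosing PredicateContext, and returns the TestResult itself so that the operator<< overloads further down can append a message to the failure that was just added. A condensed sketch of that chaining, illustrative only:

#include "jsontest.h"

void failureSketch()
{
   JsonTest::TestResult result;
   result.setTestName( "sketch" );

   // Record a failed assertion, then stream extra context onto it.
   result.addFailure( __FILE__, __LINE__, "x == y" ) << "x=" << 3 << ", y=" << 4;

   if ( result.failed() )
      result.printFailure( true );   // prints the nested chain of failures
}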
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
-/// JSONTEST_ASSERT( x == y );
-#define JSONTEST_ASSERT( expr )                         \
-   if ( expr )                                          \
-   {                                                    \
-   }                                                    \
-   else                                                 \
-      result_->addFailure( __FILE__, __LINE__, #expr )
-
-/// \brief Asserts that the given predicate is true.
-/// The predicate may do other assertions and be a member function of the fixture.
-#define JSONTEST_ASSERT_PRED( expr )                                  \
-   {                                                                  \
-      JsonTest::PredicateContext _minitest_Context = {                \
-         result_->predicateId_, __FILE__, __LINE__, #expr };          \
-      result_->predicateStackTail_->next_ = &_minitest_Context;       \
-      result_->predicateId_ += 1;                                     \
-      result_->predicateStackTail_ = &_minitest_Context;              \
-      (expr);                                                         \
-      result_->popPredicateContext();                                 \
-   }                                                                  \
-   *result_
-
-/// \brief Asserts that two values are equal.
-#define JSONTEST_ASSERT_EQUAL( expected, actual )           \
-   JsonTest::checkEqual( *result_, expected, actual,        \
-                         __FILE__, __LINE__,                \
-                         #expected " == " #actual )
-
-/// \brief Asserts that two string values are equal.
-#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual )                   \
-   JsonTest::checkStringEqual( *result_,                                   \
-                               std::string(expected), std::string(actual), \
-                               #expected " == " #actual )
-
-/// \brief Begin a fixture test case.
-#define JSONTEST_FIXTURE( FixtureType, name )           \
-   class Test##FixtureType##name : public FixtureType   \
-   {                                                    \
-   public:                                              \
-      static JsonTest::TestCase *factory()              \
-      {                                                 \
-         return new Test##FixtureType##name();          \
-      }                                                 \
-   public: /* overridden from TestCase */               \
-      virtual const char *testName() const              \
-      {                                                 \
-         return #FixtureType "/" #name;                 \
-      }                                                 \
-      virtual void runTestCase();                       \
-   };                                                   \
-                                                        \
-   void Test##FixtureType##name::runTestCase()
-
-#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
-   &Test##FixtureType##name::factory
-
-#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
-   (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
-
-#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
deleted file mode 100644
index b80776d..0000000
--- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
+++ /dev/null
@@ -1,244 +0,0 @@
-#include <json/json.h>
-#include "jsontest.h"
-
-
-// TODO:
-// - boolean values report that they are integral. They should not be.
-// - unsigned integers in the integer range are not considered to be valid integers. The range should be checked.
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file 
diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. James Street", - "url": "http://www.JSON.org/", - "comment": "// /* <!-- --", - "# -- --> */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "&#34; \u0022 %22 0x22 034 &#x22;", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ?
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From ce4b3ffda1bcd79897cc16af7562d1c43451fa0b Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 07:36:38 +0000 Subject: [PATCH 115/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@115 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 181 ++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14334 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integers, real numbers, strings, ordered sequences of
+values, and collections of name/value pairs.
+
+JsonCpp is a simple API to manipulate JSON values and to handle
+serialization and deserialization to and from strings.
+
+It can also preserve existing comments across the deserialization/serialization
+steps, making it a convenient format to store user input files.
+
+Parsing (deserialization) is user friendly and provides precise error reports.
+
+
+* Building/Testing:
+  =================
+
+JsonCpp uses SCons (http://www.scons.org) as a build system. SCons requires
+Python to be installed (http://www.python.org).
+
+You can download the scons-local distribution from the following URL:
+http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375
+
+Unzip it in the directory where you found this README file. scons.py should be
+at the same level as README.
+
+python scons.py platform=PLTFRM [TARGET]
+where PLTFRM may be one of:
+    suncc      Sun C++ (Solaris)
+    vacpp      Visual Age C++ (AIX)
+    mingw
+    msvc6      Microsoft Visual Studio 6 service pack 5-6
+    msvc70     Microsoft Visual Studio 2002
+    msvc71     Microsoft Visual Studio 2003
+    msvc80     Microsoft Visual Studio 2005
+    linux-gcc  GNU C++ (Linux, also reported to work for Mac OS X)
+
+Adding a platform is fairly simple: you need to change the SConstruct file
+to do so.
+
+and TARGET may be:
+    check: build the library and run the unit tests.
+
+
+* Running the tests manually:
+  ===========================
+
+cd test
+# This will run the Reader/Writer tests
+python runjsontests.py "path to jsontest.exe"
+
+# This will run the Reader/Writer tests, using the JSONChecker test suite
+# (http://www.json.org/JSON_checker/).
+# Note: not all tests pass: JsonCpp is too lenient (for example,
+# it allows an integer to start with '0'). The goal is to improve
+# strict-mode parsing to get all tests to pass.
+python runjsontests.py --with-json-checker "path to jsontest.exe"
+
+# This will run the unit tests (mostly Value)
+python rununittests.py "path to test_lib_json.exe"
+
+You can also run the tests under valgrind:
+python rununittests.py --valgrind "path to test_lib_json.exe"
+
+
+* Building the documentation:
+  ===========================
+
+Run the Python script doxybuild.py from the top directory:
+
+python doxybuild.py --open --with-dot
+
+See doxybuild.py --help for options.
+
+
+* Adding a reader/writer test:
+  ============================
+
+To add a test, you need to create two files in test/data:
+- a TESTNAME.json file, that contains the input document in JSON format.
+- a TESTNAME.expected file, that contains a flattened representation of
+  the input document.
+
+TESTNAME.expected file format:
+- each line represents a JSON element of the element tree represented
+  by the input document.
+- each line has two parts: the path used to access the element, separated from
+  the element value by '='. Array and object values are always empty
+  (i.e. represented by either [] or {}).
+- element path: '.' represents the root element, and is used to separate
+  object members. [N] is used to specify the value of an array element
+  at index N.
+See test_complex_01.json and test_complex_01.expected to better understand
+element paths. A small hand-written example is also sketched a few lines below.
+
+
+* Understanding reader/writer test output:
+  ========================================
+
+When a test is run, output files are generated alongside the input test files.
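+As an illustration of the TESTNAME.expected format described above (the file
+names below are hypothetical and are not part of the shipped test suite), an
+input file example.json containing:
+
+    { "count" : 3, "items" : [ true, "abc" ] }
+
+would be flattened to an example.expected along these lines:
+
+    .={}
+    .count=3
+    .items=[]
+    .items[0]=true
+    .items[1]="abc"
+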
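+The .actual and .rewrite files described below are produced by jsontest.exe.
+Conceptually, the rewrite step is a parse/serialize round trip along these
+lines (a minimal sketch using the public JsonCpp API, not the actual
+jsontest source):
+
+    #include <json/json.h>
+    #include <fstream>
+    #include <iostream>
+
+    int main()
+    {
+       Json::Value root;
+       Json::Reader reader;
+       std::ifstream input( "test_complex_01.json" );
+       if ( !reader.parse( input, root ) )               // deserialize
+       {
+          std::cout << reader.getFormatedErrorMessages(); // 0.5.0 spelling
+          return 1;
+       }
+       Json::StyledWriter writer;
+       std::cout << writer.write( root );                // the "rewrite" document
+       return 0;
+    }
+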
+Below is a short description of the content of each file:
+
+- test_complex_01.json: input JSON document
+- test_complex_01.expected: flattened JSON element tree used to check if
+  parsing was correct.
+
+- test_complex_01.actual: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.json
+- test_complex_01.rewrite: JSON document written by jsontest.exe using the
+  Json::Value parsed from test_complex_01.json and serialized using
+  Json::StyledWriter.
+- test_complex_01.actual-rewrite: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.rewrite.
+- test_complex_01.process-output: jsontest.exe output, typically useful to
+  understand parsing errors.
diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct
new file mode 100644
index 0000000..0499db9
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/SConstruct
@@ -0,0 +1,235 @@
+"""
+Notes:
+- shared library support is buggy: it assumes that a static and dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time.
+
+To add a platform:
+- add its name in options allowed_values below
+- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example.
+"""
+
+import os
+import os.path
+import sys
+
+JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip()
+DIST_DIR = '#dist'
+
+options = Variables()
+options.Add( EnumVariable('platform',
+                          'Platform (compiler/stl) used to build the project',
+                          'msvc71',
+                          allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(),
+                          ignorecase=2) )
+
+try:
+    platform = ARGUMENTS['platform']
+    if platform == 'linux-gcc':
+        CXX = 'g++' # not quite right, but env is not yet available.
+        import commands
+        version = commands.getoutput('%s -dumpversion' %CXX)
+        platform = 'linux-gcc-%s' %version
+        print "Using platform '%s'" %platform
+        LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
+        LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
+        os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
+        print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
+except KeyError:
+    print 'You must specify a "platform"'
+    sys.exit(2)
+
+print "Building using PLATFORM =", platform
+
+rootbuild_dir = Dir('#buildscons')
+build_dir = os.path.join( '#buildscons', platform )
+bin_dir = os.path.join( '#bin', platform )
+lib_dir = os.path.join( '#libs', platform )
+sconsign_dir_path = Dir(build_dir).abspath
+sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' )
+
+# Ensure the build directory exists (SConsignFile fails otherwise!)
+if not os.path.exists( sconsign_dir_path ):
+    os.makedirs( sconsign_dir_path )
+
+# Store all dependency signatures in a database
+SConsignFile( sconsign_path )
+
+def make_environ_vars():
+    """Returns a dictionary with the environment variables to use when compiling."""
+    # PATH is required to find the compiler
+    # TEMP is required for at least mingw
+    vars = {}
+    for name in ('PATH', 'TEMP', 'TMP'):
+        if name in os.environ:
+            vars[name] = os.environ[name]
+    return vars
+
+
+env = Environment( ENV = make_environ_vars(),
+                   toolpath = ['scons-tools'],
+                   tools=[] ) #, tools=['default'] )
+
+if platform == 'suncc':
+    env.Tool( 'sunc++' )
+    env.Tool( 'sunlink' )
+    env.Tool( 'sunar' )
+    env.Append( CCFLAGS = ['-mt'] )
+elif platform == 'vacpp':
+    env.Tool( 'default' )
+    env.Tool( 'aixcc' )
+    env['CXX'] = 'xlC_r' # scons does not pick up the correct one!
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation) is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, ordered sequences of values,
+and collections of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite JSON documents preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report the failure and its location in the document to the user.
+    std::cout << "Failed to parse configuration\n"
+              << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins', return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, build the new configuration document:
+// Since Json::Value has implicit constructors for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML, a data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
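The jsoncpp.dox example above serializes with Json::StyledWriter, which pretty-prints the document and preserves comments. The patch also forward-declares a Json::FastWriter (see include/json/forwards.h later in this import) for compact output. A minimal sketch, assuming FastWriter exposes the conventional std::string write( const Value &root ) member and that the headers are reachable as json/json.h:

\code
#include <json/json.h>
#include <iostream>
#include <string>

// Emit a Value as a compact, single-line JSON document.
void printCompact( const Json::Value &root )
{
   Json::FastWriter writer;
   std::string compact = writer.write( root );   // assumed: returns the serialized document
   std::cout << compact;                         // FastWriter output ends with a newline
}
\endcode

StyledWriter remains the better choice when the output is meant to be edited by hand, since compact output discards indentation and comments.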
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library build
+    - Enhance help
+  - Platform portability check: (Notes: was ok on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as a numeric value for use in preprocessor tests
+  - Remove buggy experimental hash stuff
+  - Release on sourceforge download
+  \section ms_strict Add a strict mode to the reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get jsonchecker failing tests to pass in strict mode
+  \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value.
+  Some typical use cases involve converting an application-specific structure to/from a JSON document.
+  - Event-based parser to allow deserializing a JSON document directly into a data structure instead of
+    going through the intermediate Json::Value.
+  - "Stream"-based writer to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provides an event-based parser. Should allow pulling & skipping events for ease of use.
+    - Provides a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property name definitions, avoiding allocation
+  - A static property dictionary can be provided to the JSON reader
+  - Performance scenarios & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..792bff7
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,167 @@
+"""Script to generate doxygen documentation.
+"""
+
+import re
+import os
+import os.path
+import sys
+import shutil
+from devtools import tarball
+
+def find_program(filename):
+    """Find a program on the PATH.
+    @param filename: name of the program to search for
+    @return: the full path of the first match, or '' if the program could not be found
+"""
+    paths = os.environ.get('PATH', '').split(os.pathsep)
+    suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
+    # Also try the bare name, so platforms with no executable suffixes
+    # (empty suffix list) still find the program.
+    for name in [filename+ext for ext in suffixes.split()] + [filename]:
+        for directory in paths:
+            full_path = os.path.join(directory, name)
+            if os.path.isfile(full_path):
+                return full_path
+    return ''
+
+def do_subst_in_file(targetfile, sourcefile, dict):
+    """Replace all instances of the keys of dict with their values.
+    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
+    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(os.path.join('doc', warning_log_path), 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+
+   private:
+      enum TokenType
+      {
+         tokenEndOfStream = 0,
+         tokenObjectBegin,
+         tokenObjectEnd,
+         tokenArrayBegin,
+         tokenArrayEnd,
+         tokenString,
+         tokenNumber,
+         tokenTrue,
+         tokenFalse,
+         tokenNull,
+         tokenArraySeparator,
+         tokenMemberSeparator,
+         tokenComment,
+         tokenError
+      };
+
+      class Token
+      {
+      public:
+         TokenType type_;
+         Location start_;
+         Location end_;
+      };
+
+      class ErrorInfo
+      {
+      public:
+         Token token_;
+         std::string message_;
+         Location extra_;
+      };
+
+      typedef std::deque<ErrorInfo> Errors;
+
+      bool expectToken( TokenType type, Token &token, const char *message );
+      bool readToken( Token &token );
+      void skipSpaces();
+      bool match( Location pattern,
+                  int patternLength );
+      bool readComment();
+      bool readCStyleComment();
+      bool readCppStyleComment();
+      bool readString();
+      void readNumber();
+      bool readValue();
+      bool readObject( Token &token );
+      bool readArray( Token &token );
+      bool decodeNumber( Token &token );
+      bool decodeString( Token &token );
+      bool decodeString( Token &token, std::string &decoded );
+      bool decodeDouble( Token &token );
+      bool decodeUnicodeCodePoint( Token &token,
+                                   Location &current,
+                                   Location end,
+                                   unsigned int &unicode );
+      bool decodeUnicodeEscapeSequence( Token &token,
+                                        Location &current,
+                                        Location end,
+                                        unsigned int &unicode );
+      bool addError( const std::string &message,
+                     Token &token,
+                     Location extra = 0 );
+      bool recoverFromError( TokenType skipUntilToken );
+      bool addErrorAndRecover( const std::string &message,
+                               Token &token,
+                               TokenType skipUntilToken );
+      void skipUntilSpace();
+      Value &currentValue();
+      Char getNextChar();
+      void getLocationLineAndColumn( Location location,
+                                     int &line,
+                                     int &column ) const;
+      std::string getLocationLineAndColumn( Location location ) const;
+      void addComment( Location begin,
+                       Location end,
+                       CommentPlacement placement );
+      void skipCommentTokens( Token &token );
+
+      typedef std::stack<Value *> Nodes;
+      Nodes nodes_;
+      Errors errors_;
+      std::string document_;
+      Location begin_;
+      Location end_;
+      Location current_;
+      Location lastValueEnd_;
+      Value *lastValue_;
+      std::string commentsBefore_;
+      Features features_;
+      bool collectComments_;
+   };
+
+   /** \brief Read from 'sin' into 'root'.
+
+    Always keep comments from the input JSON.
+
+    This can be used to read a file into a particular sub-object.
+    For example:
+    \code
+    Json::Value root;
+    cin >> root["dir"]["file"];
+    cout << root;
+    \endcode
+    Result:
+    \verbatim
+    {
+       "dir": {
+          "file": {
+             // The input stream JSON would be nested here.
+          }
+       }
+    }
+    \endverbatim
+    \throw std::exception on parse error.
+    \see Json::operator<<()
+   */
+   std::istream& operator>>( std::istream&, Value& );
+
+} // namespace Json
+
+#endif // CPPTL_JSON_READER_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h
new file mode 100644
index 0000000..d575b70
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/include/json/value.h
@@ -0,0 +1,1069 @@
+#ifndef CPPTL_JSON_H_INCLUDED
+# define CPPTL_JSON_H_INCLUDED
+
+# include "forwards.h"
+# include <string>
+# include <vector>
+
+# ifndef JSON_USE_CPPTL_SMALLMAP
+# include <map>
+# else
+# include <cpptl/smallmap.h>
+# endif
+# ifdef JSON_USE_CPPTL
+# include <cpptl/forwards.h>
+# endif
+
+/** \brief JSON (JavaScript Object Notation).
+ */
+namespace Json {
+
+   /** \brief Type of the value held by a Value object.
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
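
Before the class itself, an illustrative sketch of the access patterns just described; it is not from the imported sources, and the member names "encoding", "indent", and "plugins" are invented for the example:

    #include <json/json.h>
    #include <iostream>

    void example()
    {
       Json::Value root( Json::objectValue );
       root["encoding"] = "UTF-8";                // non-const operator[] creates missing members
       root["indent"]["length"] = 3;              // nested objects are created on demand
       root["plugins"][0u] = "python";            // 0u selects the array-index overload
       root["plugins"].append( "c++" );           // same as root["plugins"][root["plugins"].size()] = "c++"

       // get() returns the supplied default when the member does not exist.
       int length = root["indent"].get( "length", Json::Value( 4 ) ).asInt();

       Json::Value::Members names = root.getMemberNames();
       for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
          std::cout << *it << "\n";
       std::cout << length << "\n";
    }
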
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
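
The look-up rule spelled out in the comment above (pageIndex = itemIndex / itemsPerPage, then pages_[pageIndex][itemIndex % itemsPerPage]) can be shown as a tiny stand-alone sketch. It is illustrative only and not part of the imported sources; `pages` stands in for the pages_ member.

    // Illustrative sketch of the paged look-up described above.
    template<typename ItemType>
    ItemType &pagedLookup( ItemType **pages, unsigned int itemIndex )
    {
       const unsigned int itemsPerPage = 8;                      // power of two, so / and % stay cheap
       const unsigned int pageIndex = itemIndex / itemsPerPage;  // which page holds the item
       return pages[pageIndex][itemIndex % itemsPerPage];        // offset inside that page
    }
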
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
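
As with the Reader earlier, a short illustrative sketch (not part of the imported sources) shows how the three writers declared in this header are meant to be used:

    #include <json/json.h>
    #include <iostream>

    void writeAll( const Json::Value &root )
    {
       Json::FastWriter fast;                          // compact, single-line output
       std::string compact = fast.write( root );

       Json::StyledWriter styled;                      // human friendly, indented string
       std::string pretty = styled.write( root );

       Json::StyledStreamWriter streamWriter( "  " );  // two-space indentation, writes to a stream
       streamWriter.write( std::cout, root );

       std::cout << root;                              // operator<< also goes through StyledStreamWriter
       std::cout << compact << pretty;
    }
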
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..9dcdcf6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,181 @@ +"""Tag the sandbox for release, make source and doc tarballs. + +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' 
) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + if os.path.isdir( export_dir ): + shutil.rmtree( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' 
) + release_version = args[0] + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' + doxybuild.build_doc( options, make_release=True ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + distcheck_dir = 'dist/distcheck' + print 'Decompressing source tarball to', distcheck_dir + tarball.decompress( source_tarball_path, distcheck_dir ) + #@todo: + # ?compile & run & check + # ?upload documentation + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
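+      // The function-local static inside arrayAllocator() is constructed on
+      // its first call; constructing this dummy global merely forces that
+      // first call to happen during static initialization, before main().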
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
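+ * (The default map allocator below zero-fills a freshly allocated link with
+ * memset instead of running this constructor, so an all-zero link must be a
+ * valid empty state.)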
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
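+      // Keys are packed from index 0 within each link, so the first
+      // available slot marks the end of this bucket's chain.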
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
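+// For illustration, the Reader implemented below is typically driven along
+// these lines (the member name queried here is hypothetical):
+//
+//   Json::Value root;
+//   Json::Reader reader;
+//   if ( !reader.parse( document, root ) )
+//       std::cerr << reader.getFormatedErrorMessages();
+//   else
+//       std::string encoding = root.get( "encoding", "UTF-8" ).asString();
+//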
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
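+   // resize() only applies to null and array values.  In the default
+   // std::map-based representation, growing just creates the element at
+   // newSize-1 (size() is derived from the highest index), while shrinking
+   // erases indices newSize..oldSize-1 one by one.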
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
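+            // (Error reporting here is still a stub: resolve() keeps walking
+            // the remaining arguments and ends up returning a reference to
+            // Value::null when a member is missing.)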
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
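+   // Typical call site (sketch; "root" is any Json::Value):
+   //   Json::StyledStreamWriter writer( "   " );
+   //   writer.write( std::cout, root );
+   // or, equivalently, via the operator<< defined at the end of this file:
+   //   std::cout << root;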
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-)
+
+     if ( !document_.empty() )
+     {
+        char last = document_[document_.length()-1];
+        if ( last == ' ' )     // already indented
+           return;
+        if ( last != '\n' )    // Comments may add new-line
+           *document_ << '\n';
+     }
+   */
+   *document_ << '\n' << indentString_;
+}
+
+
+void
+StyledStreamWriter::writeWithIndent( const std::string &value )
+{
+   writeIndent();
+   *document_ << value;
+}
+
+
+void
+StyledStreamWriter::indent()
+{
+   indentString_ += indentation_;
+}
+
+
+void
+StyledStreamWriter::unindent()
+{
+   assert( indentString_.size() >= indentation_.size() );
+   indentString_.resize( indentString_.size() - indentation_.size() );
+}
+
+
+void
+StyledStreamWriter::writeCommentBeforeValue( const Value &root )
+{
+   if ( !root.hasComment( commentBefore ) )
+      return;
+   *document_ << normalizeEOL( root.getComment( commentBefore ) );
+   *document_ << "\n";
+}
+
+
+void
+StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root )
+{
+   if ( root.hasComment( commentAfterOnSameLine ) )
+      *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) );
+
+   if ( root.hasComment( commentAfter ) )
+   {
+      *document_ << "\n";
+      *document_ << normalizeEOL( root.getComment( commentAfter ) );
+      *document_ << "\n";
+   }
+}
+
+
+bool
+StyledStreamWriter::hasCommentForValue( const Value &value )
+{
+   return value.hasComment( commentBefore )
+          ||  value.hasComment( commentAfterOnSameLine )
+          ||  value.hasComment( commentAfter );
+}
+
+
+std::string
+StyledStreamWriter::normalizeEOL( const std::string &text )
+{
+   std::string normalized;
+   normalized.reserve( text.length() );
+   const char *begin = text.c_str();
+   const char *end = begin + text.length();
+   const char *current = begin;
+   while ( current != end )
+   {
+      char c = *current++;
+      if ( c == '\r' ) // mac or dos EOL
+      {
+         if ( *current == '\n' ) // convert dos EOL
+            ++current;
+         normalized += '\n';
+      }
+      else // handle unix EOL & other char
+         normalized += c;
+   }
+   return normalized;
+}
+
+
+std::ostream& operator<<( std::ostream &sout, const Value &root )
+{
+   Json::StyledStreamWriter writer;
+   writer.write(sout, root);
+   return sout;
+}
+
+
+} // namespace Json
diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript
new file mode 100644
index 0000000..6e7c6c8
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript
@@ -0,0 +1,8 @@
+Import( 'env buildLibrary' )
+
+buildLibrary( env, Split( """
+   json_reader.cpp
+   json_value.cpp
+   json_writer.cpp
+   """ ),
+   'json' )
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp
new file mode 100644
index 0000000..a07d0fe
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp
@@ -0,0 +1,603 @@
+#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC
+#include "jsontest.h"
+#include <stdio.h>
+#include <string>
+
+#if defined(_MSC_VER)
+// Used to install a report hook that prevents dialogs on assertion and error.
+# include <crtdbg.h>
+#endif // if defined(_MSC_VER)
+
+#if defined(_WIN32)
+// Used to prevent dialog on memory fault.
+// Limits headers included by Windows.h
+# define WIN32_LEAN_AND_MEAN
+# define NOSERVICE
+# define NOMCX
+# define NOIME
+# define NOSOUND
+# define NOCOMM
+# define NORPC
+# define NOGDI
+# define NOUSER
+# define NODRIVERS
+# define NOLOGERROR
+# define NOPROFILER
+# define NOMEMMGR
+# define NOLFILEIO
+# define NOOPENFILE
+# define NORESOURCE
+# define NOATOM
+# define NOLANGUAGE
+# define NOLSTRING
+# define NODBCS
+# define NOKEYBOARDINFO
+# define NOGDICAPMASKS
+# define NOCOLOR
+# define NOGDIOBJ
+# define NODRAWTEXT
+# define NOTEXTMETRIC
+# define NOSCALABLEFONT
+# define NOBITMAP
+# define NORASTEROPS
+# define NOMETAFILE
+# define NOSYSMETRICS
+# define NOSYSTEMPARAMSINFO
+# define NOMSG
+# define NOWINSTYLES
+# define NOWINOFFSETS
+# define NOSHOWWINDOW
+# define NODEFERWINDOWPOS
+# define NOVIRTUALKEYCODES
+# define NOKEYSTATES
+# define NOWH
+# define NOMENUS
+# define NOSCROLL
+# define NOCLIPBOARD
+# define NOICONS
+# define NOMB
+# define NOSYSCOMMANDS
+# define NOMDI
+# define NOCTLMGR
+# define NOWINMESSAGES
+# include <windows.h>
+#endif // if defined(_WIN32)
+
+namespace JsonTest {
+
+
+// class TestResult
+// //////////////////////////////////////////////////////////////////
+
+TestResult::TestResult()
+   : predicateId_( 1 )
+   , lastUsedPredicateId_( 0 )
+   , messageTarget_( 0 )
+{
+   // The root predicate has id 0
+   rootPredicateNode_.id_ = 0;
+   rootPredicateNode_.next_ = 0;
+   predicateStackTail_ = &rootPredicateNode_;
+}
+
+
+void
+TestResult::setTestName( const std::string &name )
+{
+   name_ = name;
+}
+
+TestResult &
+TestResult::addFailure( const char *file, unsigned int line,
+                        const char *expr )
+{
+   /// Walks the PredicateContext stack adding them to failures_ if not already added.
+   unsigned int nestingLevel = 0;
+   PredicateContext *lastNode = rootPredicateNode_.next_;
+   for ( ; lastNode != 0; lastNode = lastNode->next_ )
+   {
+      if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext
+      {
+         lastUsedPredicateId_ = lastNode->id_;
+         addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_,
+                         nestingLevel );
+         // Link the PredicateContext to the failure for message target when
+         // popping the PredicateContext.
+         lastNode->failure_ = &( failures_.back() );
+      }
+      ++nestingLevel;
+   }
+
+   // Adds the failed assertion
+   addFailureInfo( file, line, expr, nestingLevel );
+   messageTarget_ = &( failures_.back() );
+   return *this;
+}
+
+
+void
+TestResult::addFailureInfo( const char *file, unsigned int line,
+                            const char *expr, unsigned int nestingLevel )
+{
+   Failure failure;
+   failure.file_ = file;
+   failure.line_ = line;
+   if ( expr )
+   {
+      failure.expr_ = expr;
+   }
+   failure.nestingLevel_ = nestingLevel;
+   failures_.push_back( failure );
+}
+
+
+TestResult &
+TestResult::popPredicateContext()
+{
+   PredicateContext *lastNode = &rootPredicateNode_;
+   while ( lastNode->next_ != 0  &&  lastNode->next_->next_ != 0 )
+   {
+      lastNode = lastNode->next_;
+   }
+   // Set message target to popped failure
+   PredicateContext *tail = lastNode->next_;
+   if ( tail != 0  &&  tail->failure_ != 0 )
+   {
+      messageTarget_ = tail->failure_;
+   }
+   // Remove tail from list
+   predicateStackTail_ = lastNode;
+   lastNode->next_ = 0;
+   return *this;
+}
+
+
+bool
+TestResult::failed() const
+{
+   return !failures_.empty();
+}
+
+
+unsigned int
+TestResult::getAssertionNestingLevel() const
+{
+   unsigned int level = 0;
+   const PredicateContext *lastNode = &rootPredicateNode_;
+   while ( lastNode->next_ != 0 )
+   {
+      lastNode = lastNode->next_;
+      ++level;
+   }
+   return level;
+}
+
+
+void
+TestResult::printFailure( bool printTestName ) const
+{
+   if ( failures_.empty() )
+   {
+      return;
+   }
+
+   if ( printTestName )
+   {
+      printf( "* Detail of %s test failure:\n", name_.c_str() );
+   }
+
+   // Print the failures in the order they were added, which follows the predicate callstack
+   Failures::const_iterator itEnd = failures_.end();
+   for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it )
+   {
+      const Failure &failure = *it;
+      std::string indent( failure.nestingLevel_ * 2, ' ' );
+      if ( failure.file_ )
+      {
+         printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ );
+      }
+      if ( !failure.expr_.empty() )
+      {
+         printf( "%s\n", failure.expr_.c_str() );
+      }
+      else if ( failure.file_ )
+      {
+         printf( "\n" );
+      }
+      if ( !failure.message_.empty() )
+      {
+         std::string reindented = indentText( failure.message_, indent + "  " );
+         printf( "%s\n", reindented.c_str() );
+      }
+   }
+}
+
+
+std::string
+TestResult::indentText( const std::string &text,
+                        const std::string &indent )
+{
+   std::string reindented;
+   std::string::size_type lastIndex = 0;
+   while ( lastIndex < text.size() )
+   {
+      std::string::size_type nextIndex = text.find( '\n', lastIndex );
+      if ( nextIndex == std::string::npos )
+      {
+         nextIndex = text.size() - 1;
+      }
+      reindented += indent;
+      reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 );
+      lastIndex = nextIndex + 1;
+   }
+   return reindented;
+}
+
+
+TestResult &
+TestResult::addToLastFailure( const std::string &message )
+{
+   if ( messageTarget_ != 0 )
+   {
+      messageTarget_->message_ += message;
+   }
+   return *this;
+}
+
+
+TestResult &
+TestResult::operator << ( bool value )
+{
+   return addToLastFailure( value ? "true"
+                                  : "false" );
+}
+
+
+TestResult &
+TestResult::operator << ( int value )
+{
+   char buffer[32];
+   sprintf( buffer, "%d", value );
+   return addToLastFailure( buffer );
+}
+
+
+TestResult &
+TestResult::operator << ( unsigned int value )
+{
+   char buffer[32];
+   sprintf( buffer, "%u", value );
+   return addToLastFailure( buffer );
+}
+
+
+TestResult &
+TestResult::operator << ( double value )
+{
+   char buffer[32];
+   sprintf( buffer, "%16g", value );
+   return addToLastFailure( buffer );
+}
+
+
+TestResult &
+TestResult::operator << ( const char *value )
+{
+   return addToLastFailure( value ? value
+                                  : "" );
+}
+
+
+TestResult &
+TestResult::operator << ( const std::string &value )
+{
+   return addToLastFailure( value );
+}
+
+
+
+// class TestCase
+// //////////////////////////////////////////////////////////////////
+
+TestCase::TestCase()
+   : result_( 0 )
+{
+}
+
+
+TestCase::~TestCase()
+{
+}
+
+
+void
+TestCase::run( TestResult &result )
+{
+   result_ = &result;
+   runTestCase();
+}
+
+
+
+// class Runner
+// //////////////////////////////////////////////////////////////////
+
+Runner::Runner()
+{
+}
+
+
+Runner &
+Runner::add( TestCaseFactory factory )
+{
+   tests_.push_back( factory );
+   return *this;
+}
+
+
+unsigned int
+Runner::testCount() const
+{
+   return static_cast<unsigned int>( tests_.size() );
+}
+
+
+std::string
+Runner::testNameAt( unsigned int index ) const
+{
+   TestCase *test = tests_[index]();
+   std::string name = test->testName();
+   delete test;
+   return name;
+}
+
+
+void
+Runner::runTestAt( unsigned int index, TestResult &result ) const
+{
+   TestCase *test = tests_[index]();
+   result.setTestName( test->testName() );
+   printf( "Testing %s: ", test->testName() );
+   fflush( stdout );
+#if JSON_USE_EXCEPTION
+   try
+   {
+#endif // if JSON_USE_EXCEPTION
+      test->run( result );
+#if JSON_USE_EXCEPTION
+   }
+   catch ( const std::exception &e )
+   {
+      result.addFailure( __FILE__, __LINE__,
+                         "Unexpected exception caught:" ) << e.what();
+   }
+#endif // if JSON_USE_EXCEPTION
+   delete test;
+   const char *status = result.failed() ? "FAILED"
+                                        : "OK";
+   printf( "%s\n", status );
+   fflush( stdout );
+}
+
+
+bool
+Runner::runAllTest( bool printSummary ) const
+{
+   unsigned int count = testCount();
+   std::deque<TestResult> failures;
+   for ( unsigned int index = 0; index < count; ++index )
+   {
+      TestResult result;
+      runTestAt( index, result );
+      if ( result.failed() )
+      {
+         failures.push_back( result );
+      }
+   }
+
+   if ( failures.empty() )
+   {
+      if ( printSummary )
+      {
+         printf( "All %d tests passed\n", count );
+      }
+      return true;
+   }
+   else
+   {
+      for ( unsigned int index = 0; index < failures.size(); ++index )
+      {
+         TestResult &result = failures[index];
+         result.printFailure( count > 1 );
+      }
+
+      if ( printSummary )
+      {
+         unsigned int failedCount = static_cast<unsigned int>( failures.size() );
+         unsigned int passedCount = count - failedCount;
+         printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount );
+      }
+      return false;
+   }
+}
+
+
+bool
+Runner::testIndex( const std::string &testName,
+                   unsigned int &indexOut ) const
+{
+   unsigned int count = testCount();
+   for ( unsigned int index = 0; index < count; ++index )
+   {
+      if ( testNameAt(index) == testName )
+      {
+         indexOut = index;
+         return true;
+      }
+   }
+   return false;
+}
+
+
+void
+Runner::listTests() const
+{
+   unsigned int count = testCount();
+   for ( unsigned int index = 0; index < count; ++index )
+   {
+      printf( "%s\n", testNameAt( index ).c_str() );
+   }
+}
+
+
+int
+Runner::runCommandLine( int argc, const char *argv[] ) const
+{
+   typedef std::deque<std::string> TestNames;
+   Runner subrunner;
+   for ( int index = 1; index < argc; ++index )
+   {
+      std::string opt = argv[index];
+      if ( opt == "--list-tests" )
+      {
+         listTests();
+         return 0;
+      }
+      else if ( opt == "--test-auto" )
+      {
+         preventDialogOnCrash();
+      }
+      else if ( opt == "--test" )
+      {
+         ++index;
+         if ( index < argc )
+         {
+            unsigned int testNameIndex;
+            if ( testIndex( argv[index], testNameIndex ) )
+            {
+               subrunner.add( tests_[testNameIndex] );
+            }
+            else
+            {
+               fprintf( stderr, "Test '%s' does not exist!\n", argv[index] );
+               return 2;
+            }
+         }
+         else
+         {
+            printUsage( argv[0] );
+            return 2;
+         }
+      }
+      else
+      {
+         printUsage( argv[0] );
+         return 2;
+      }
+   }
+   bool succeeded;
+   if ( subrunner.testCount() > 0 )
+   {
+      succeeded = subrunner.runAllTest( subrunner.testCount() > 1 );
+   }
+   else
+   {
+      succeeded = runAllTest( true );
+   }
+   return succeeded ? 0
+                    : 1;
+}
+
+
+#if defined(_MSC_VER)
+// Hook MSVCRT assertions to prevent dialog from appearing
+static int
+msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
+{
+   // The default CRT handling of error and assertion is to display
+   // an error dialog to the user.
+   // Instead, when an error or an assertion occurs, we force the
+   // application to terminate using abort() after displaying
+   // the message on stderr.
+   if ( reportType == _CRT_ERROR  ||
+        reportType == _CRT_ASSERT )
+   {
+      // Calling abort() causes the ReportHook to be called again.
+      // The following is used to detect this case and lets the
+      // error handler fall back on its default behaviour
+      // (displaying a warning message).
+      static volatile bool isAborting = false;
+      if ( isAborting )
+      {
+         return TRUE;
+      }
+      isAborting = true;
+
+      fprintf( stderr, "CRT Error/Assert:\n%s\n", message );
+      fflush( stderr );
+      abort();
+   }
+   // Lets other report types (_CRT_WARNING) be handled as they would be by default
+   return FALSE;
+}
+#endif // if defined(_MSC_VER)
+
+
+void
+Runner::preventDialogOnCrash()
+{
+#if defined(_MSC_VER)
+   // Install a hook to prevent MSVCRT error and assertion from
+   // popping a dialog.
+   _CrtSetReportHook( &msvcrtSilentReportHook );
+#endif // if defined(_MSC_VER)
+
+   // @todo investigate this handler (for buffer overflow)
+   // _set_security_error_handler
+
+#if defined(_WIN32)
+   // Prevents the system from popping a dialog for debugging if the
+   // application fails due to invalid memory access.
+   SetErrorMode( SEM_FAILCRITICALERRORS
+                 | SEM_NOGPFAULTERRORBOX
+                 | SEM_NOOPENFILEERRORBOX );
+#endif // if defined(_WIN32)
+}
+
+void
+Runner::printUsage( const char *appName )
+{
+   printf(
+      "Usage: %s [options]\n"
+      "\n"
+      "If --test is not specified, then all the test cases are run.\n"
+      "\n"
+      "Valid options:\n"
+      "--list-tests: print the name of all test cases on the standard\n"
+      "              output and exit.\n"
+      "--test TESTNAME: executes the test case with the specified name.\n"
+      "                 May be repeated.\n"
+      "--test-auto: prevent dialog prompting for debugging on crash.\n"
+      , appName );
+}
+
+
+
+// Assertion functions
+// //////////////////////////////////////////////////////////////////
+
+TestResult &
+checkStringEqual( TestResult &result,
+                  const std::string &expected, const std::string &actual,
+                  const char *file, unsigned int line, const char *expr )
+{
+   if ( expected != actual )
+   {
+      result.addFailure( file, line, expr );
+      result << "Expected: '" << expected << "'\n";
+      result << "Actual  : '" << actual << "'";
+   }
+   return result;
+}
+
+
+} // namespace JsonTest
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h
new file mode 100644
index 0000000..8f0bd31
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h
@@ -0,0 +1,254 @@
+#ifndef JSONTEST_H_INCLUDED
+# define JSONTEST_H_INCLUDED
+
+# include <json/config.h>
+# include <stdio.h>
+# include <deque>
+# include <string>
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// Mini Unit Testing framework
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+
+
+/** \brief Unit testing framework.
+ * \warning: all assertions are non-aborting, test case execution will continue
+ *           even if an assertion fails.
+ *           This constraint is for portability: the framework needs to compile
+ *           on Visual Studio 6 and must not require exception usage.
+ */
+namespace JsonTest {
+
+
+   class Failure
+   {
+   public:
+      const char *file_;
+      unsigned int line_;
+      std::string expr_;
+      std::string message_;
+      unsigned int nestingLevel_;
+   };
+
+
+   /// Context used to create the assertion callstack on failure.
+   /// Must be a POD to allow inline initialisation without stepping
+   /// into the debugger.
+   struct PredicateContext
+   {
+      typedef unsigned int Id;
+      Id id_;
+      const char *file_;
+      unsigned int line_;
+      const char *expr_;
+      PredicateContext *next_;
+      /// Related Failure, set when the PredicateContext is converted
+      /// into a Failure.
+      Failure *failure_;
+   };
+
+   class TestResult
+   {
+   public:
+      TestResult();
+
+      /// \internal Implementation detail for assertion macros
+      /// Not encapsulated to prevent step into when debugging failed assertions
+      /// Incremented by one on assertion predicate entry, decreased by one
+      /// by addPredicateContext().
+      PredicateContext::Id predicateId_;
+
+      /// \internal Implementation detail for predicate macros
+      PredicateContext *predicateStackTail_;
+
+      void setTestName( const std::string &name );
+
+      /// Adds an assertion failure.
+      TestResult &addFailure( const char *file, unsigned int line,
+                              const char *expr = 0 );
+
+      /// Removes the last PredicateContext added to the predicate stack
+      /// chained list.
+      /// Next messages will be targeted at the PredicateContext that was removed.
+      TestResult &popPredicateContext();
+
+      bool failed() const;
+
+      void printFailure( bool printTestName ) const;
+
+      TestResult &operator << ( bool value );
+      TestResult &operator << ( int value );
+      TestResult &operator << ( unsigned int value );
+      TestResult &operator << ( double value );
+      TestResult &operator << ( const char *value );
+      TestResult &operator << ( const std::string &value );
+
+   private:
+      TestResult &addToLastFailure( const std::string &message );
+      unsigned int getAssertionNestingLevel() const;
+      /// Adds a failure or a predicate context
+      void addFailureInfo( const char *file, unsigned int line,
+                           const char *expr, unsigned int nestingLevel );
+      static std::string indentText( const std::string &text,
+                                     const std::string &indent );
+
+      typedef std::deque<Failure> Failures;
+      Failures failures_;
+      std::string name_;
+      PredicateContext rootPredicateNode_;
+      PredicateContext::Id lastUsedPredicateId_;
+      /// Failure which is the target of the messages added using operator <<
+      Failure *messageTarget_;
+   };
+
+
+   class TestCase
+   {
+   public:
+      TestCase();
+
+      virtual ~TestCase();
+
+      void run( TestResult &result );
+
+      virtual const char *testName() const = 0;
+
+   protected:
+      TestResult *result_;
+
+   private:
+      virtual void runTestCase() = 0;
+   };
+
+   /// Function pointer type for TestCase factory
+   typedef TestCase *(*TestCaseFactory)();
+
+   class Runner
+   {
+   public:
+      Runner();
+
+      /// Adds a test to the suite
+      Runner &add( TestCaseFactory factory );
+
+      /// Runs tests as specified on the command-line.
+      /// If no command-line arguments are provided, run all tests.
+      /// If --list-tests is provided, then print the list of all test cases.
+      /// If --test is provided, then run the test with the given name; may be repeated.
+      int runCommandLine( int argc, const char *argv[] ) const;
+
+      /// Runs all the test cases
+      bool runAllTest( bool printSummary ) const;
+
+      /// Returns the number of test cases in the suite
+      unsigned int testCount() const;
+
+      /// Returns the name of the test case at the specified index
+      std::string testNameAt( unsigned int index ) const;
+
+      /// Runs the test case at the specified index using the specified TestResult
+      void runTestAt( unsigned int index, TestResult &result ) const;
+
+      static void printUsage( const char *appName );
+
+   private: // prevents copy construction and assignment
+      Runner( const Runner &other );
+      Runner &operator =( const Runner &other );
+
+   private:
+      void listTests() const;
+      bool testIndex( const std::string &testName, unsigned int &index ) const;
+      static void preventDialogOnCrash();
+
+   private:
+      typedef std::deque<TestCaseFactory> Factories;
+      Factories tests_;
+   };
+
+   template<typename T>
+   TestResult &
+   checkEqual( TestResult &result, const T &expected, const T &actual,
+               const char *file, unsigned int line, const char *expr )
+   {
+      if ( expected != actual )
+      {
+         result.addFailure( file, line, expr );
+         result << "Expected: " << expected << "\n";
+         result << "Actual  : " << actual;
+      }
+      return result;
+   }
+
+   TestResult &
+   checkStringEqual( TestResult &result,
+                     const std::string &expected, const std::string &actual,
+                     const char *file, unsigned int line, const char *expr );
+
+} // namespace JsonTest
+
+
+/// \brief Asserts that the given expression is true.
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
+/// JSONTEST_ASSERT( x == y );
+#define JSONTEST_ASSERT( expr )                                               \
+   if ( expr )                                                                \
+   {                                                                          \
+   }                                                                          \
+   else                                                                       \
+      result_->addFailure( __FILE__, __LINE__, #expr )
+
+/// \brief Asserts that the given predicate is true.
+/// The predicate may do other assertions and be a member function of the fixture.
+#define JSONTEST_ASSERT_PRED( expr )                                    \
+   {                                                                    \
+      JsonTest::PredicateContext _minitest_Context = {                  \
+          result_->predicateId_, __FILE__, __LINE__, #expr };           \
+      result_->predicateStackTail_->next_ = &_minitest_Context;         \
+      result_->predicateId_ += 1;                                       \
+      result_->predicateStackTail_ = &_minitest_Context;                \
+      (expr);                                                           \
+      result_->popPredicateContext();                                   \
+   }                                                                    \
+   *result_
+
+/// \brief Asserts that two values are equal.
+#define JSONTEST_ASSERT_EQUAL( expected, actual )           \
+   JsonTest::checkEqual( *result_, expected, actual,        \
+                         __FILE__, __LINE__,                \
+                         #expected " == " #actual )
+
+/// \brief Asserts that two strings are equal.
+#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual )                 \
+   JsonTest::checkStringEqual( *result_,                                 \
+      std::string(expected), std::string(actual),                        \
+      __FILE__, __LINE__,                                                \
+      #expected " == " #actual )
+
+/// \brief Begin a fixture test case.
+#define JSONTEST_FIXTURE( FixtureType, name )                  \
+   class Test##FixtureType##name : public FixtureType          \
+   {                                                           \
+   public:                                                     \
+      static JsonTest::TestCase *factory()                     \
+      {                                                        \
+         return new Test##FixtureType##name();                 \
+      }                                                        \
+   public: /* overridden from TestCase */                      \
+      virtual const char *testName() const                     \
+      {                                                        \
+         return #FixtureType "/" #name;                        \
+      }                                                        \
+      virtual void runTestCase();                              \
+   };                                                          \
+                                                               \
+   void Test##FixtureType##name::runTestCase()
+
+#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
+   &Test##FixtureType##name::factory
+
+#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
+   (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
+
+#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
new file mode 100644
index 0000000..b80776d
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
@@ -0,0 +1,244 @@
+#include <json/json.h>
+#include "jsontest.h"
+
+
+// TODO:
+// - boolean values report that they are integral. They should not.
+// - unsigned integers in the signed integer range are not considered valid integers. The range should be checked.
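[Editor's note: the sketch below is not part of the patch. It shows, with hypothetical names (DemoTest, checkPositive), how the JSONTEST_FIXTURE and JSONTEST_ASSERT_PRED macros from jsontest.h are meant to be combined: assertions made inside a helper predicate are reported nested under the JSONTEST_ASSERT_PRED call site, which is exactly the pattern the ValueTest fixtures below use with checkIs and checkMemberCount.]

    struct DemoTest : JsonTest::TestCase      // hypothetical fixture, mirrors ValueTest below
    {
       void checkPositive( int x )            // helper predicate: may itself assert
       {
          JSONTEST_ASSERT( x > 0 ) << "x=" << x;
       }
    };

    JSONTEST_FIXTURE( DemoTest, positives )
    {
       JSONTEST_ASSERT_PRED( checkPositive( 1 ) );     // passes
       JSONTEST_ASSERT_PRED( checkPositive( -1 ) );    // the failure is reported with the
                                                       // predicate call as its context
    }

    // Registered from main() with:
    //    JSONTEST_REGISTER_FIXTURE( runner, DemoTest, positives );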
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* <!-- --", + "# -- --> */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "&#34; \u0022 %22 0x22 034 &#x22;", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ?
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 9affd33178f1dccd82abf796fa5f0649c7d81c01 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 08:05:41 +0000 Subject: [PATCH 116/268] - added source tarball decompression git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@116 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/devtools/tarball.py | 32 +++++++++++++++++++++---------- trunk/jsoncpp/makerelease.py | 5 ++++- 2 files changed, 26 insertions(+), 11 deletions(-) diff --git a/trunk/jsoncpp/devtools/tarball.py b/trunk/jsoncpp/devtools/tarball.py index 2ce261a..182602e 100644 --- a/trunk/jsoncpp/devtools/tarball.py +++ b/trunk/jsoncpp/devtools/tarball.py @@ -29,13 +29,25 @@ def visit(tar, dirname, names): path_in_tar = archive_name(path) tar.add(path, path_in_tar ) compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - tar.close() + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index b928c62..9dcdcf6 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -167,8 +167,11 @@ def main(): source_tarball_path = 'dist/%s.tar.gz' % source_dir print 'Generating source tarball to', source_tarball_path tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + distcheck_dir = 'dist/distcheck' + print 'Decompressing source tarball to', distcheck_dir + tarball.decompress( source_tarball_path, distcheck_dir ) #@todo: - # decompress source tarball # ?compile & run & check # ?upload documentation else: From d3f7fdd828ad3f98256c3d296086ff7ffb848cb3 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 20:43:26 +0000 Subject: [PATCH 117/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@117 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 167 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 181 -- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 
1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - 
.../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14334 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 
tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt 
deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. 
- - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) 
-if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. 
-env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' 
% sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
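Since WARN_LOGFILE above is routed to the %WARNING_LOG_PATH% placeholder, a build driver can run doxygen and then treat a non-empty warning log as a failure. The sketch below assumes the expanded config path and the log name; neither is taken from the original scripts.

```python
# Sketch of invoking doxygen on the expanded config and surfacing the warning
# log named by %WARNING_LOG_PATH%; both paths are illustrative assumptions.
import os
import subprocess
import sys

warning_log = 'doxygen_warnings.txt'  # assumed %WARNING_LOG_PATH% substitution

status = subprocess.call(['doxygen', 'doc/doxyfile'])
if status != 0:
    sys.exit('doxygen exited with status %d' % status)

# WARN_IF_UNDOCUMENTED / WARN_IF_DOC_ERROR messages end up in WARN_LOGFILE;
# treat any of them as a failed documentation build.
if os.path.exists(warning_log) and os.path.getsize(warning_log) > 0:
    with open(warning_log) as log:
        sys.stderr.write(log.read())
    sys.exit('doxygen produced warnings, see %s' % warning_log)
```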
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
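The compiled HTML help output is parameterised the same way: GENERATE_HTMLHELP comes from %HTML_HELP%, the archive is named jsoncpp-%JSONCPP_VERSION%.chm, and HHC_LOCATION points at hhc.exe from HTML Help Workshop. One plausible way to pick the %HTML_HELP% value is sketched below; the probing logic is an assumption rather than the project's actual policy.

```python
# Sketch of deciding the %HTML_HELP% substitution: enable the compiled .chm
# output only when the Microsoft help compiler is installed. The probing
# logic is an assumption, not part of the original build files.
import os
import sys

HHC_PATH = r'c:\Program Files\HTML Help Workshop\hhc.exe'  # matches HHC_LOCATION above

def want_html_help():
    return sys.platform == 'win32' and os.path.isfile(HHC_PATH)

substitutions = {
    'HTML_HELP': 'YES' if want_html_help() else 'NO',
    # ... remaining %NAME% values as in the earlier expansion sketch ...
}
```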
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
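The Graphviz-related tags are filled in the same fashion: HAVE_DOT and DOT_PATH come from the %HAVE_DOT% and %DOT_PATH% placeholders. Below is a hedged sketch of probing PATH for the dot executable to derive both values; the search logic is an assumption.

```python
# Sketch of deriving the %HAVE_DOT% / %DOT_PATH% substitutions by searching
# PATH for the Graphviz 'dot' executable; the probing logic is an assumption.
import os

def find_dot_dir():
    exe = 'dot.exe' if os.name == 'nt' else 'dot'
    for directory in os.environ.get('PATH', '').split(os.pathsep):
        if os.path.isfile(os.path.join(directory, exe)):
            return directory
    return None

dot_dir = find_dot_dir()
substitutions = {
    'HAVE_DOT': 'YES' if dot_dir else 'NO',
    # DOT_PATH expects the directory containing dot, or blank to rely on PATH.
    'DOT_PATH': dot_dir or '',
}
```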
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
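The walk-through in jsoncpp.dox above shows the read / query / write steps separately; below is a minimal self-contained sketch of the same flow, assuming the 0.5.0-era API declared under include/json/ (Json::Reader, Json::Value, Json::StyledWriter). The configuration string and the loadPlugIn() hook are illustrative placeholders, not part of the library.

#include <json/json.h>
#include <iostream>
#include <string>

// Hypothetical application hook; stands in for whatever consumes a plug-in name.
static void loadPlugIn( const std::string & ) {}

int main()
{
    // Illustrative configuration document (comments are allowed by the default Reader features).
    const std::string config_doc =
        "{ \"encoding\" : \"UTF-8\","
        "  \"plug-ins\" : [ \"python\", \"c++\", \"ruby\" ],"
        "  \"indent\" : { \"length\" : 3, \"use_space\" : true } }";

    Json::Value root;      // will contain the root value after parsing
    Json::Reader reader;
    if ( !reader.parse( config_doc, root ) )
    {
        std::cout << "Failed to parse configuration\n"
                  << reader.getFormatedErrorMessages();   // spelling matches the declared API
        return 1;
    }

    // Fetch members with defaults and iterate over the plug-in array.
    std::string encoding = root.get( "encoding", "UTF-8" ).asString();
    const Json::Value plugins = root["plug-ins"];
    for ( Json::Value::UInt i = 0; i < plugins.size(); ++i )
        loadPlugIn( plugins[i].asString() );

    std::cout << "encoding=" << encoding
              << " indent=" << root["indent"].get( "length", 3 ).asInt()
              << " use_space=" << root["indent"].get( "use_space", true ).asBool() << "\n";

    // Serialize the (possibly modified) document back to text.
    root["encoding"] = encoding;
    Json::StyledWriter writer;
    std::cout << writer.write( root );
    return 0;
}
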
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 792bff7..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(filename): - """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(os.path.join('doc', warning_log_path), 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
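 *
 * A minimal sketch of the accessors described above (the member names and
 * values are illustrative only):
 * \code
 * Json::Value root( Json::objectValue );
 * root["name"] = "json-cpp";                    // member is created on first access
 * root["tags"].append( "json" );                // a nullValue member becomes an arrayValue
 * root["tags"].append( "parser" );
 * Json::Value size = root.get( "size", 3 );     // default is returned when the member is absent
 * Json::Value::Members names = root.getMemberNames();
 * \endcode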
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
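      // Worked example of the look-up arithmetic described above (itemsPerPage == 8):
      //   itemIndex 19  ->  pageIndex = 19 / 8 = 2, slot = 19 % 8 = 3,
      //   so the element is found at pages_[2][3].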
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
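 *
 * A minimal usage sketch (the value written is illustrative only):
 * \code
 * Json::Value root;
 * root["status"] = "ok";
 * Json::StyledStreamWriter writer( "   " );   // three-space indentation
 * writer.write( std::cout, root );
 * \endcode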
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
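The writer declarations removed above are easiest to read from the calling side. A minimal usage sketch, assuming only the FastWriter, StyledWriter and Reader classes declared in the removed include/json headers; the JSON literal is made up for illustration:

    #include <json/json.h>
    #include <iostream>

    int main()
    {
       Json::Value root;
       Json::Reader reader;
       // Parse a small document, then emit it once compact and once styled.
       if ( reader.parse( "{ \"name\": \"jsoncpp\", \"tags\": [1, 2, 3] }", root ) )
       {
          Json::FastWriter fast;      // whole document on one line, suited to RPC-style transport
          Json::StyledWriter styled;  // indented, human readable
          std::cout << fast.write( root );
          std::cout << styled.write( root );
       }
       return 0;
    }

FastWriter emits the document as a single line; StyledWriter applies the object/array line-break rules documented in the comments above.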
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index 9dcdcf6..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. - -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' 
) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - if os.path.isdir( export_dir ): - shutil.rmtree( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. [Default: %default]""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' 
) - release_version = args[0] - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' - doxybuild.build_doc( options, make_release=True ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - distcheck_dir = 'dist/distcheck' - print 'Decompressing source tarball to', distcheck_dir - tarball.decompress( source_tarball_path, distcheck_dir ) - #@todo: - # ?compile & run & check - # ?upload documentation - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
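The batch allocator introduced in json_batchallocator.h above hands out raw storage: its header comment requires callers to construct objects with placement new and to run destructors themselves before release(). A minimal sketch of that contract, assuming the two template parameters (element type, objects per allocation) carried by the template line that the listing shows stripped of its angle brackets; the allocator is an internal detail of lib_json, so this only compiles alongside its sources:

    #include <new>                    // placement new
    #include <json/value.h>
    #include "json_batchallocator.h"

    void batchAllocatorSketch()
    {
       Json::BatchAllocator<Json::Value, 1> allocator;  // one object per allocation
       Json::Value *slot = allocator.allocate();        // raw, uninitialized storage
       new ( slot ) Json::Value( "built in place" );    // construct with placement new
       slot->~Value();                                  // caller runs the destructor
       allocator.release( slot );                       // slot returns to the free list
    }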
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
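ValueInternalArray keeps its elements in fixed-size pages and turns a flat element index into a (page, offset) pair arithmetically, exactly as makeIterator() above does with itemsPerPage. A standalone sketch of that mapping; the page size shown is illustrative only, the real constant is declared in the value header:

    // Decompose a flat element index the way makeIterator()/resolveReference() do.
    const unsigned int itemsPerPage = 8;             // illustrative page size
    unsigned int index = 21;
    unsigned int pageIndex = index / itemsPerPage;   // page 2
    unsigned int itemIndex = index % itemsPerPage;   // slot 5 within that page
    // the element then lives at pages_[pageIndex][itemIndex]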
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
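This map is the storage behind Json::Value's objectValue type; its buckets and key/value links stay internal. From the public API the same members are reached through Value::operator[] and Value::getMemberNames(), as the test runner earlier in this patch already does. A minimal sketch with made-up member names:

    #include <json/value.h>
    #include <iostream>

    void printMembers()
    {
       Json::Value obj;
       obj["name"] = "jsoncpp";
       obj["kind"] = "library";
       Json::Value::Members members = obj.getMemberNames();  // vector of key strings
       for ( Json::Value::Members::iterator it = members.begin(); it != members.end(); ++it )
          std::cout << *it << " = " << obj[*it].asString() << std::endl;
    }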
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
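// A minimal sketch of how the three writers in this file are driven,
// assuming a populated Json::Value named `root`:
#include <json/writer.h>
#include <iostream>

static void emit( const Json::Value &root )
{
   Json::FastWriter fast;                    // most compact form, drops comments
   std::cout << fast.write( root );

   Json::StyledWriter styled;                // human-readable, preserves comments
   std::cout << styled.write( root );

   Json::StyledStreamWriter stream( "  " );  // like StyledWriter, but writes
   stream.write( std::cout, root );          // directly to a std::ostream
}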
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
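// A minimal sketch of how the JSONTEST_FIXTURE / JSONTEST_REGISTER_FIXTURE
// macros declared in jsontest.h fit together; `DemoTest` is a hypothetical
// fixture used only for illustration.
#include "jsontest.h"

struct DemoTest : JsonTest::TestCase
{
};

JSONTEST_FIXTURE( DemoTest, twoPlusTwo )     // expands to class TestDemoTesttwoPlusTwo
{
   JSONTEST_ASSERT_EQUAL( 4, 2 + 2 ) << "basic arithmetic";
}

static int runDemo( int argc, const char *argv[] )
{
   JsonTest::Runner runner;
   JSONTEST_REGISTER_FIXTURE( runner, DemoTest, twoPlusTwo );
   return runner.runCommandLine( argc, argv );
}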
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 6a24c635fbb29e7f5f535b517ec89af8d4266d08 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 20:45:50 +0000 Subject: [PATCH 118/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@118 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 248 ++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14401 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. 
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, ordered sequences of values, and
+collections of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite a JSON document preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout << "Failed to parse configuration\n"
+              << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, to make the new configuration document:
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams.  This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
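The \mainpage example above shows the intended usage flow; the following is a minimal, self-contained sketch of how the interfaces introduced in this patch fit together, including the strict-mode Features object declared further down in include/json/features.h. It is illustrative only and not part of the patch: the document text, the fallback values, the program structure and the <json/json.h> include path are assumptions, while the Json:: calls themselves are the ones declared in features.h, reader.h and value.h below.

\code
// Minimal sketch, assuming the headers from this patch are installed as <json/...>.
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Illustrative input and fallback values; only the Json:: calls come from the headers below.
   const std::string document = "{ \"encoding\" : \"UTF-8\", \"indent\" : { \"length\" : 3 } }";

   Json::Reader reader( Json::Features::strictMode() );   // forbids comments, requires object/array root
   Json::Value root;
   if ( !reader.parse( document, root ) )
   {
      std::cerr << reader.getFormatedErrorMessages();      // spelled as declared in reader.h
      return 1;
   }

   // get() falls back to the supplied default when the member is missing.
   std::string encoding = root.get( "encoding", "UTF-8" ).asString();
   int indentLength = root["indent"].get( "length", 3 ).asInt();

   std::cout << encoding << " " << indentLength << std::endl;
   return 0;
}
\endcode

As in the \mainpage example, the resulting root value can then be serialized back with Json::StyledWriter::write() or streamed with operator<<.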
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Makes JsonCpp ready for release
+    - Build system clean-up:
+      - Fix build on Windows (shared-library build is broken)
+      - Add enable/disable flag for static and shared library build
+      - Enhance help
+    - Platform portability check: (Notes: was ok on last check)
+      - linux/gcc,
+      - solaris/cc,
+      - windows/msvc678,
+      - aix/vacpp
+    - Add JsonCpp version to header as numeric for use in preprocessor test
+    - Remove buggy experimental hash stuff
+    - Release on sourceforge download
+  \section ms_strict Adds a strict mode to reader/parser
+    Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+    - Enforce only object or array as root element
+    - Disable comment support
+    - Get jsonchecker failing tests to pass in strict mode
+  \section ms_separation Expose a json reader/writer API that does not impose using Json::Value.
+    Some typical use cases involve converting an application-specific structure to/from a JSON document.
+    - Event-based parser to allow unserializing a JSON document directly into a data structure instead of
+      using the intermediate Json::Value.
+    - "Stream" based parser to serialize a JSON document without using Json::Value as input.
+    - Performance oriented parser/writer:
+      - Provides an event based parser. Should allow pulling & skipping events for ease of use.
+      - Provides a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+    - Provides support for static property name definition, avoiding allocation
+    - A static property dictionary can be provided to the JSON reader
+    - Performance scenario & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..792bff7
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,167 @@
+"""Script to generate doxygen documentation.
+"""
+
+import re
+import os
+import os.path
+import sys
+import shutil
+from devtools import tarball
+
+def find_program(filename):
+    """find a program in the directories listed in the PATH environment variable,
+    and return its full path.
+    On win32 the usual executable suffixes (.exe, .com, .bat, .cmd) are also tried,
+    so that e.g. 'doxygen' also matches 'doxygen.exe'.
+    @param filename: name of the program to search for
+    @return: the full path of the first occurrence of filename on the PATH,
+    or '' if filename could not be found
+"""
+    paths = os.environ.get('PATH', '').split(os.pathsep)
+    suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
+    for name in [filename+ext for ext in suffixes.split(' ')]:   # split(' ') keeps an empty suffix on non-win32, so the bare filename is still tried
+        for directory in paths:
+            full_path = os.path.join(directory, name)
+            if os.path.isfile(full_path):
+                return full_path
+    return ''
+
+def do_subst_in_file(targetfile, sourcefile, dict):
+    """Replace all instances of the keys of dict with their values.
+    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
+    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = '../build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': warning_log_path + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(os.path.join('doc', warning_log_path), 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
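+      // Worked example of the look-up rule described above, with itemsPerPage == 8
+      // as defined just before: for itemIndex 13, pageIndex is 13 / 8 = 1 and the
+      // offset inside that page is 13 % 8 = 5, i.e. the element lives at
+      // pages_[1][5]. Appending only ever grows the pages_ pointer index, which is
+      // what keeps insertion amortized constant time.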
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
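+     * Client code never calls this constructor directly; iterators are normally
+     * obtained from Value::begin() and Value::end(). A minimal usage sketch
+     * (root is assumed to be an objectValue):
+     * \code
+     * for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
+     *    std::cout << it.memberName() << " : " << (*it).toStyledString();
+     * \endcode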
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
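+    * A minimal usage sketch (the document built here is only illustrative):
+    * \code
+    * Json::Value root;
+    * root["name"] = "example";
+    * Json::StyledStreamWriter writer( "  " );   // two spaces per indent level
+    * writer.write( std::cout, root );
+    * \endcode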
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..8d9565a --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,248 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
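+    # The two globs below split the exported tree: Visual Studio project files
+    # (*.sln, *.vcproj) are normalized to CRLF line endings, while the portable
+    # sources, scripts and test data are normalized to LF. Anything matching
+    # prune_dirs (build output, dist, scons-local) is left out of the walk.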
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store', default=False, + help="""Skips build check.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' 
) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' + doxybuild.build_doc( options, make_release=True ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + + #@todo: + # ?upload documentation + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. 
+ + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
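+      // The free list is intrusive: the first bytes of each released object hold
+      // the pointer to the next free object, so reuse is a simple pop. Usage
+      // sketch (Foo and alloc are hypothetical names):
+      //   Foo *p = new ( alloc.allocate() ) Foo();  // construct with placement new
+      //   p->~Foo();                                // caller destroys explicitly
+      //   alloc.release( p );                       // slot returns to the free list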
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
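// Every element was constructed with placement new on a raw page obtained
+      // from the array allocator, so each one must be destroyed explicitly
+      // here; the page memory itself is handed back to the allocator below.
+      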
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
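+ * The batch-allocator path relies on this: allocateMapLink() memsets a raw
+ * link instead of running this constructor, leaving previous_ and next_ null
+ * and every item slot in its default, available state.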
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
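// Walk the slots of this link front to back: used slots are kept packed,
+         // so the first slot flagged "available" means the key is not present
+         // in this bucket chain and can be claimed for the new member;
+         // otherwise the stored keys are compared until a match is found.
+         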
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
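+// Illustrative usage sketch of the Reader class implemented below (not part
+// of the original sources; `document` is assumed to be a std::string holding
+// the JSON text):
+//
+//    Json::Reader reader;
+//    Json::Value root;
+//    if ( !reader.parse( document, root, true ) )
+//       std::cerr << reader.getFormatedErrorMessages();
+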
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
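// Any other token type ends the member loop; the addErrorAndRecover()
+         // call after the loop then reports it as a missing '}' or member name.
+         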
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
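// A short usage sketch for this Path class (illustrative only; it assumes the
// defaulted PathArgument parameters declared for the constructor in
// json_value.h, and "root" stands for any object-typed Json::Value):
//
//   Json::Path path( ".indent.length" );
//   Json::Value length = path.resolve( root, Json::Value( 3 ) );
//
// With the two-argument resolve() that follows, a missing member anywhere
// along the dotted path simply yields the supplied default value.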
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
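// Typical callers of the writers implemented in this file (a sketch; "root" is
// any Json::Value and std::cout merely an example stream):
//
//   Json::FastWriter fast;                   // compact, single-line output
//   std::string doc = fast.write( root );
//
//   Json::StyledStreamWriter styled( "  " ); // indented, human-readable output
//   styled.write( std::cout, root );
//
//   std::cout << root;                       // operator<< at the end of this file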
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
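// Concretely: with two enclosing predicate contexts on the stack, they are
// recorded at nesting levels 0 and 1 and the failed assertion itself at level
// 2; printFailure() later renders each level as two extra spaces of indent.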
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+   _CrtSetReportHook( &msvcrtSilentReportHook );
+#endif // if defined(_MSC_VER)
+
+   // @todo investigate this handler (for buffer overflow)
+   // _set_security_error_handler
+
+#if defined(_WIN32)
+   // Prevents the system from popping a dialog for debugging if the
+   // application fails due to invalid memory access.
+   SetErrorMode( SEM_FAILCRITICALERRORS
+                 | SEM_NOGPFAULTERRORBOX
+                 | SEM_NOOPENFILEERRORBOX );
+#endif // if defined(_WIN32)
+}
+
+void
+Runner::printUsage( const char *appName )
+{
+   printf(
+      "Usage: %s [options]\n"
+      "\n"
+      "If --test is not specified, then all the test cases are run.\n"
+      "\n"
+      "Valid options:\n"
+      "--list-tests: print the name of all test cases on the standard\n"
+      "              output and exit.\n"
+      "--test TESTNAME: executes the test case with the specified name.\n"
+      "                 May be repeated.\n"
+      "--test-auto: prevent dialog prompting for debugging on crash.\n"
+      , appName );
+}
+
+
+
+// Assertion functions
+// //////////////////////////////////////////////////////////////////
+
+TestResult &
+checkStringEqual( TestResult &result,
+                  const std::string &expected, const std::string &actual,
+                  const char *file, unsigned int line, const char *expr )
+{
+   if ( expected != actual )
+   {
+      result.addFailure( file, line, expr );
+      result << "Expected: '" << expected << "'\n";
+      result << "Actual : '" << actual << "'";
+   }
+   return result;
+}
+
+
+} // namespace JsonTest
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h
new file mode 100644
index 0000000..8f0bd31
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h
@@ -0,0 +1,254 @@
+#ifndef JSONTEST_H_INCLUDED
+# define JSONTEST_H_INCLUDED
+
+# include <json/config.h>
+# include <stdio.h>
+# include <deque>
+# include <string>
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// Mini Unit Testing framework
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+
+
+/** \brief Unit testing framework.
+ * \warning: all assertions are non-aborting; test case execution will continue
+ *           even if an assertion fails.
+ *           This constraint is for portability: the framework needs to compile
+ *           on Visual Studio 6 and must not require exception usage.
+ */
+namespace JsonTest {
+
+
+   class Failure
+   {
+   public:
+      const char *file_;
+      unsigned int line_;
+      std::string expr_;
+      std::string message_;
+      unsigned int nestingLevel_;
+   };
+
+
+   /// Context used to create the assertion callstack on failure.
+   /// Must be a POD to allow inline initialisation without the debugger
+   /// stepping into a constructor.
+   struct PredicateContext
+   {
+      typedef unsigned int Id;
+      Id id_;
+      const char *file_;
+      unsigned int line_;
+      const char *expr_;
+      PredicateContext *next_;
+      /// Related Failure, set when the PredicateContext is converted
+      /// into a Failure.
+      Failure *failure_;
+   };
+
+   class TestResult
+   {
+   public:
+      TestResult();
+
+      /// \internal Implementation detail for assertion macros
+      /// Not encapsulated to prevent stepping into it when debugging failed assertions
+      /// Incremented by one on assertion predicate entry, decreased by one
+      /// by addPredicateContext().
+      PredicateContext::Id predicateId_;
+
+      /// \internal Implementation detail for predicate macros
+      PredicateContext *predicateStackTail_;
+
+      void setTestName( const std::string &name );
+
+      /// Adds an assertion failure.
+      TestResult &addFailure( const char *file, unsigned int line,
+                              const char *expr = 0 );
+
+      /// Removes the last PredicateContext added to the predicate stack
+      /// chained list.
+      /// Next messages will be targeted at the PredicateContext that was removed.
+      TestResult &popPredicateContext();
+
+      bool failed() const;
+
+      void printFailure( bool printTestName ) const;
+
+      TestResult &operator << ( bool value );
+      TestResult &operator << ( int value );
+      TestResult &operator << ( unsigned int value );
+      TestResult &operator << ( double value );
+      TestResult &operator << ( const char *value );
+      TestResult &operator << ( const std::string &value );
+
+   private:
+      TestResult &addToLastFailure( const std::string &message );
+      unsigned int getAssertionNestingLevel() const;
+      /// Adds a failure or a predicate context
+      void addFailureInfo( const char *file, unsigned int line,
+                           const char *expr, unsigned int nestingLevel );
+      static std::string indentText( const std::string &text,
+                                     const std::string &indent );
+
+      typedef std::deque<Failure> Failures;
+      Failures failures_;
+      std::string name_;
+      PredicateContext rootPredicateNode_;
+      PredicateContext::Id lastUsedPredicateId_;
+      /// Failure which is the target of the messages added using operator <<
+      Failure *messageTarget_;
+   };
+
+
+   class TestCase
+   {
+   public:
+      TestCase();
+
+      virtual ~TestCase();
+
+      void run( TestResult &result );
+
+      virtual const char *testName() const = 0;
+
+   protected:
+      TestResult *result_;
+
+   private:
+      virtual void runTestCase() = 0;
+   };
+
+   /// Function pointer type for TestCase factory
+   typedef TestCase *(*TestCaseFactory)();
+
+   class Runner
+   {
+   public:
+      Runner();
+
+      /// Adds a test to the suite
+      Runner &add( TestCaseFactory factory );
+
+      /// Runs the test cases specified on the command line.
+      /// If no command-line arguments are provided, run all tests.
+      /// If --list-tests is provided, then print the list of all test cases.
+      /// If --test is provided, then run only the specified test case.
+      int runCommandLine( int argc, const char *argv[] ) const;
+
+      /// Runs all the test cases
+      bool runAllTest( bool printSummary ) const;
+
+      /// Returns the number of test cases in the suite
+      unsigned int testCount() const;
+
+      /// Returns the name of the test case at the specified index
+      std::string testNameAt( unsigned int index ) const;
+
+      /// Runs the test case at the specified index using the specified TestResult
+      void runTestAt( unsigned int index, TestResult &result ) const;
+
+      static void printUsage( const char *appName );
+
+   private: // prevents copy construction and assignment
+      Runner( const Runner &other );
+      Runner &operator =( const Runner &other );
+
+   private:
+      void listTests() const;
+      bool testIndex( const std::string &testName, unsigned int &index ) const;
+      static void preventDialogOnCrash();
+
+   private:
+      typedef std::deque<TestCaseFactory> Factories;
+      Factories tests_;
+   };
+
+   template<typename T>
+   TestResult &
+   checkEqual( TestResult &result, const T &expected, const T &actual,
+               const char *file, unsigned int line, const char *expr )
+   {
+      if ( expected != actual )
+      {
+         result.addFailure( file, line, expr );
+         result << "Expected: " << expected << "\n";
+         result << "Actual : " << actual;
+      }
+      return result;
+   }
+
+   TestResult &
+   checkStringEqual( TestResult &result,
+                     const std::string &expected, const std::string &actual,
+                     const char *file, unsigned int line, const char *expr );
+
+} // namespace JsonTest
+
+
+/// \brief Asserts that the given expression is true.
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
+/// JSONTEST_ASSERT( x == y );
+#define JSONTEST_ASSERT( expr )                                   \
+   if ( expr )                                                    \
+   {                                                              \
+   }                                                              \
+   else                                                           \
+      result_->addFailure( __FILE__, __LINE__, #expr )
+
+/// \brief Asserts that the given predicate is true.
+/// The predicate may do other assertions and be a member function of the fixture.
+#define JSONTEST_ASSERT_PRED( expr )                                 \
+   {                                                                 \
+      JsonTest::PredicateContext _minitest_Context = {               \
+         result_->predicateId_, __FILE__, __LINE__, #expr };         \
+      result_->predicateStackTail_->next_ = &_minitest_Context;      \
+      result_->predicateId_ += 1;                                    \
+      result_->predicateStackTail_ = &_minitest_Context;             \
+      (expr);                                                        \
+      result_->popPredicateContext();                                \
+   }                                                                 \
+   *result_
+
+/// \brief Asserts that two values are equal.
+#define JSONTEST_ASSERT_EQUAL( expected, actual )        \
+   JsonTest::checkEqual( *result_, expected, actual,     \
+                         __FILE__, __LINE__,             \
+                         #expected " == " #actual )
+
+/// \brief Asserts that two string values are equal.
+#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual )                    \
+   JsonTest::checkStringEqual( *result_,                                    \
+                               std::string(expected), std::string(actual),  \
+                               __FILE__, __LINE__, #expected " == " #actual )
+
+/// \brief Begin a fixture test case.
+#define JSONTEST_FIXTURE( FixtureType, name )             \
+   class Test##FixtureType##name : public FixtureType     \
+   {                                                      \
+   public:                                                \
+      static JsonTest::TestCase *factory()                \
+      {                                                   \
+         return new Test##FixtureType##name();            \
+      }                                                   \
+   public: /* overridden from TestCase */                 \
+      virtual const char *testName() const                \
+      {                                                   \
+         return #FixtureType "/" #name;                   \
+      }                                                   \
+      virtual void runTestCase();                         \
+   };                                                     \
+                                                          \
+   void Test##FixtureType##name::runTestCase()
+
+#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
+   &Test##FixtureType##name::factory
+
+#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
+   (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
+
+#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
new file mode 100644
index 0000000..b80776d
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
@@ -0,0 +1,244 @@
+#include <json/json.h>
+#include "jsontest.h"
+
+
+// TODO:
+// - boolean values report that they are integral. They should not.
+// - unsigned integers in the signed integer range are not considered to be valid integers. The range should be checked.
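To see how the jsontest.h macros above compose before reading the full suite, here is a minimal sketch of a test program; the DemoTest fixture, its arithmetic case, and the checks inside it are illustrative assumptions only, and the real usage is the ValueTest code that follows.

#include "jsontest.h"

// Illustrative fixture: any JsonTest::TestCase-derived type can serve as the
// FixtureType argument; the macro-generated subclass supplies testName()
// and runTestCase().
struct DemoTest : JsonTest::TestCase
{
};

// Expands to class TestDemoTestarithmetic, its factory(), and the body below.
JSONTEST_FIXTURE( DemoTest, arithmetic )
{
   // Assertions are non-aborting: execution continues past a failure.
   JSONTEST_ASSERT( 2 + 2 == 4 ) << "basic arithmetic sanity check";
   JSONTEST_ASSERT_EQUAL( 4, 2 + 2 );
}

int main( int argc, const char *argv[] )
{
   JsonTest::Runner runner;
   // Hands &TestDemoTestarithmetic::factory to the runner.
   JSONTEST_REGISTER_FIXTURE( runner, DemoTest, arithmetic );
   // Honours --list-tests, --test TESTNAME and --test-auto, as printUsage() describes.
   return runner.runCommandLine( argc, argv );
}

Each JSONTEST_FIXTURE expansion pairs a TestCase subclass with a factory function; the Runner stores only factories and creates a fresh test object for every run, which is why tests_ is a deque of TestCaseFactory rather than of TestCase pointers.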
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
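(The generate_expected.py helper shown just above targets Python 2 — it relies on the file() builtin and print statements. Purely as an illustrative aside, and not part of this patch, a rough Python 3 equivalent of the same logic — copy each *.json file to a matching *.expected file unless one already exists — could look like this:

import glob
import os.path

for path in glob.glob('*.json'):
    # Read the JSON input and derive the matching .expected path.
    with open(path, 'rt') as f:
        text = f.read()
    target = os.path.splitext(path)[0] + '.expected'
    if os.path.exists(target):
        print('skipping:', target)
    else:
        print('creating:', target)
        with open(target, 'wt') as f:
            f.write(text)

The only substantive differences are the context-managed open() calls and print() as a function; the skip-if-present behaviour is unchanged.)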
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 53fdcad622d15954b0361d97181977c5adcd1063 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:33:08 +0000 Subject: [PATCH 119/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@119 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 167 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 248 -- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 
- .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - 
.../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14401 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
-It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. 
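As an aside on the TESTNAME.expected format documented above, the following is an illustrative sketch (not one of the shipped test files or tools): a small Python flattener that produces path=value lines in that style. The input document and the helper name are invented for the example.

    import json

    def flatten(value, path='.'):
        # Emit one "path=value" line per element, in the style of the
        # TESTNAME.expected files (a sketch, not the shipped generator).
        if isinstance(value, dict):
            print('%s={}' % path)
            for key in sorted(value):
                child = path + key if path == '.' else path + '.' + key
                flatten(value[key], child)
        elif isinstance(value, list):
            print('%s=[]' % path)
            for index, item in enumerate(value):
                flatten(item, '%s[%d]' % (path, index))
        else:
            print('%s=%s' % (path, json.dumps(value)))

    flatten(json.loads('{"count": 2, "items": ["a", "b"]}'))
    # Prints:
    #   .={}
    #   .count=2
    #   .items=[]
    #   .items[0]="a"
    #   .items[1]="b"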
-Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. 
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
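The docstring of ant_pattern_to_re above spells out the matching conventions; the snippet below is a quick illustration of them, assuming the package is importable as devtools.antglob from the source root (the module itself is Python 2 era code, relying on dircache).

    from devtools import antglob

    rex = antglob.ant_pattern_to_re('src/**/*.py')
    print(rex.match('src/dir/script.py') is not None)    # True: any depth below src/
    print(rex.match('other/src/script.py') is not None)  # False: pattern is anchored at src/
    print(rex.match('src/script.pyc') is not None)       # False: extension must be exactly .py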
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
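The make_tarball and decompress helpers defined in devtools/tarball.py above are plain functions; a minimal usage sketch follows (paths and file lists are invented for illustration, not taken from the release scripts).

    from devtools import tarball

    # Bundle a few top-level items under a jsoncpp-src-0.5.0/ prefix inside the archive.
    tarball.make_tarball('dist/jsoncpp-src-0.5.0.tar.gz',
                         sources=['include', 'src', 'SConstruct'],
                         base_dir='.',
                         prefix_dir='jsoncpp-src-0.5.0')

    # Unpack the same archive into a scratch directory.
    tarball.decompress('dist/jsoncpp-src-0.5.0.tar.gz', 'unpacked')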
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
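The deleted mainpage above describes the read/modify/write cycle in fragments. Below is a minimal, stand-alone sketch of that same cycle against the 0.5.0 API shown in this patch; the configuration string and the program framing are illustrative additions, not part of the patch itself:

#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Hypothetical input, modelled on the mainpage's example document.
   const std::string config_doc =
      "// Configuration options\n"
      "{ \"encoding\" : \"UTF-8\", \"indent\" : { \"length\" : 3 } }";

   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( config_doc, root ) )
   {
      std::cout << "Failed to parse configuration\n"
                << reader.getFormatedErrorMessages();
      return 1;
   }

   // Read members with defaults, then modify the document in place.
   std::string encoding = root.get( "encoding", "UTF-8" ).asString();
   int indentLength = root["indent"].get( "length", 3 ).asInt();
   root["indent"]["use_space"] = true;

   // StyledWriter re-emits the document, including collected comments.
   Json::StyledWriter writer;
   std::cout << encoding << " " << indentLength << "\n"
             << writer.write( root );
   return 0;
}

Since comment collection is on by default and StyledWriter re-emits collected comments, the leading "// Configuration options" comment should survive the round trip, which is the "rewrite JSON document preserving original comments" feature the mainpage lists.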
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 792bff7..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(filename): - """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(os.path.join('doc', warning_log_path), 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
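The features.h shown above is the 0.5.0 mechanism for toggling strict parsing. The following sketch contrasts the default feature set with strictMode(); the sample documents and the expected pass/fail results are inferred from the header's comments (comments forbidden, root restricted to array or object), not taken from the patch:

#include <json/json.h>
#include <iostream>
#include <string>

// Returns whether `doc` parses successfully under the given feature set.
static bool parses( const Json::Features &features, const std::string &doc )
{
   Json::Reader reader( features );
   Json::Value root;
   return reader.parse( doc, root );
}

int main()
{
   const std::string commented = "// a comment\n{ \"key\" : 1 }";
   const std::string bareRoot  = "42";

   // Default feature set: comments allowed, any root value accepted.
   std::cout << parses( Json::Features::all(), commented ) << "\n";          // expect 1
   std::cout << parses( Json::Features::all(), bareRoot )  << "\n";          // expect 1

   // Strict mode: both documents should be rejected.
   std::cout << parses( Json::Features::strictMode(), commented ) << "\n";   // expect 0
   std::cout << parses( Json::Features::strictMode(), bareRoot )  << "\n";   // expect 0
   return 0;
}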
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
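To make the constructor and accessor documentation above concrete, here is a hypothetical snippet that builds a small document and reads it back with get() defaults; it uses only methods declared in this header, and the member names and values are illustrative assumptions.

\code
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   Json::Value root( Json::objectValue );            // {}
   root["name"] = "jsoncpp";                         // members are created on demand
   root["version"] = 0.5;
   root["tags"] = Json::Value( Json::arrayValue );   // []
   root["tags"].append( "json" );
   root["tags"].append( "c++" );

   // get() returns the stored value, or the supplied default when the key/index is absent.
   std::string license = root.get( "license", "unknown" ).asString();
   Json::Value firstTag = root["tags"].get( 0u, Json::Value::null );
   std::cout << license << " / " << firstTag.asString() << std::endl;

   Json::Value::Members members = root.getMemberNames();   // member names of the object
   for ( unsigned int i = 0; i < members.size(); ++i )
      std::cout << members[i] << std::endl;
   return 0;
}
\endcode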
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
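The Path / PathArgument pair is explicitly flagged as experimental and untested above, so the following is only an illustration of the documented syntax (parameterized index via ".[%]", node creation via make()), with assumed sample data, not a vetted usage pattern.

\code
#include <json/json.h>
#include <iostream>

int main()
{
   Json::Reader reader;
   Json::Value root;
   reader.parse( "{ \"files\" : [ { \"name\" : \"value.h\" } ] }", root );

   // ".files.[%].name" resolves files[0].name; the index is supplied as a PathArgument.
   Json::Path path( ".files.[%].name", Json::PathArgument( 0u ) );
   Json::Value name = path.resolve( root, Json::Value( "missing" ) );
   std::cout << name.asString() << std::endl;        // expected: value.h

   // make() creates the intermediate nodes on demand and returns a reference to the leaf.
   Json::Path( ".build.platform" ).make( root ) = "msvc71";
   std::cout << root["build"]["platform"].asString() << std::endl;
   return 0;
}
\endcode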
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
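The page look-up rule quoted in the comment above is plain integer arithmetic; the snippet below simply replays it with the documented page size of 8, as an illustration of the addressing scheme rather than the internal container itself.

\code
#include <iostream>

int main()
{
   const unsigned int itemsPerPage = 8;   // ValueInternalArray::itemsPerPage (power of 2)
   for ( unsigned int itemIndex = 0; itemIndex < 20; ++itemIndex )
   {
      unsigned int pageIndex = itemIndex / itemsPerPage;     // which entry of pages_
      unsigned int indexInPage = itemIndex % itemsPerPage;   // slot inside that page
      std::cout << "item " << itemIndex << " -> pages_[" << pageIndex
                << "][" << indexInPage << "]" << std::endl;
   }
   return 0;
}
\endcode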
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
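A short sketch of iterating an objectValue with the const iterator declared above, using memberName() for the key; begin()/end() on a const Value yield const_iterator, and the member names used here are assumptions for illustration.

\code
#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value root( Json::objectValue );
   root["first"] = 1;
   root["second"] = 2;

   const Json::Value &croot = root;   // const access yields Value::const_iterator
   for ( Json::Value::const_iterator it = croot.begin(); it != croot.end(); ++it )
      std::cout << it.memberName() << " = " << (*it).asInt() << std::endl;
   return 0;
}
\endcode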
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
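For comparison, a minimal sketch of the three writers declared in this header, with FastWriter producing a single line and the styled writers producing the indented layout described above; the sample value and the two-space indentation string are assumptions.

\code
#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value root( Json::objectValue );
   root["name"] = "jsoncpp";
   root["tags"].append( "json" );     // null member becomes an array on append

   Json::FastWriter fast;             // compact, machine oriented
   std::cout << fast.write( root );

   Json::StyledWriter styled;         // human friendly, returns a string
   std::cout << styled.write( root );

   Json::StyledStreamWriter streamWriter( "  " );   // writes directly to a stream
   streamWriter.write( std::cout, root );
   return 0;
}
\endcode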
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ [vcproj XML content not preserved] diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ [vcproj XML content not preserved] diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ [vcproj XML content not preserved] diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index 8d9565a..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,248 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs.
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. [Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store', default=False, - help="""Skips build check.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' 
) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' - doxybuild.build_doc( options, make_release=True ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - - #@todo: - # ?upload documentation - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. 
- - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
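// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the deleted file above):
// decodeUnicodeCodePoint() combines a high surrogate (U+D800..U+DBFF) with
// the following low surrogate using
//    codepoint = 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF)
// Worked example for the JSON escape "\uD834\uDD1E" (U+1D11E):

#include <cassert>

inline unsigned combineSurrogates( unsigned hi, unsigned lo )
{
   return 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF);
}

inline void surrogatePairExample()
{
   // hi & 0x3FF = 0x034,  0x034 << 10 = 0xD000,  lo & 0x3FF = 0x11E
   assert( combineSurrogates( 0xD834, 0xDD1E ) == 0x1D11E );
}
// ---------------------------------------------------------------------------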
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
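// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the deleted file above): the
// operator>> defined here throws std::runtime_error carrying
// getFormatedErrorMessages() when parsing fails.  A hypothetical caller,
// assuming the library's public headers (e.g. <json/json.h>) are included:

#include <iostream>
#include <sstream>
#include <stdexcept>

inline void errorReportingExample()
{
   std::istringstream doc( "{ \"answer\" : 42, }" );   // trailing comma is rejected by the reader above
   Json::Value root;
   try
   {
      doc >> root;
   }
   catch ( const std::runtime_error &e )
   {
      // what() contains "* Line <l>, Column <c>" entries followed by the message.
      std::cerr << e.what();
   }
}
// ---------------------------------------------------------------------------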
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
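// ---------------------------------------------------------------------------
// Editorial note on the limits above: with the usual 32-bit unsigned int,
// UInt(-1) is 0xFFFFFFFF, so
//    maxUInt = 4294967295
//    maxInt  = Int( 0xFFFFFFFF / 2 )    =  2147483647
//    minInt  = Int( ~(0xFFFFFFFF / 2) ) = -2147483648
// (the exact values follow the platform's int width).
// ---------------------------------------------------------------------------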
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
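// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the deleted file above): the
// asInt()/asUInt() conversions range-check their input, and with the
// JSON_ASSERT_MESSAGE definition earlier in this file an out-of-range
// conversion throws std::runtime_error.  Hypothetical caller, assuming the
// public headers are included:

#include <stdexcept>

inline void conversionExample()
{
   Json::Value big( Json::Value::UInt( 3000000000u ) );   // uintValue
   try
   {
      big.asInt();   // 3000000000 > maxInt: "integer out of signed integer range"
   }
   catch ( const std::runtime_error & )
   {
   }
   double asReal = big.asDouble();   // fine: 3e9 is representable as a double
   bool truthy = big.asBool();       // non-zero numeric values convert to true
   (void)asReal; (void)truthy;
}
// ---------------------------------------------------------------------------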
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
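// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the deleted file above): the
// non-const operator[] turns a null value into an object or array on demand
// and creates missing entries, while the const overloads and get() return
// the shared null or a supplied default without modifying anything.
// Hypothetical caller, assuming the public headers are included:

inline void accessExample()
{
   Json::Value root;                    // nullValue
   root["name"] = "json";               // becomes objectValue on first use
   root["ids"][2u] = 7;                 // "ids" becomes arrayValue; size() == 3

   Json::Value first = root["ids"].get( 0u, Json::Value( -1 ) );   // index 0 was never assigned, so the default (-1) comes back
   bool inRange = root["ids"].isValidIndex( 5u );                  // false: size() is 3
   (void)first; (void)inRange;
}
// ---------------------------------------------------------------------------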
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
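// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the deleted file above): Path
// expressions combine '.'-separated member names with "[n]" indices; '%' and
// "[%]" are placeholders bound to the PathArgument parameters.  The example
// assumes the defaulted trailing PathArgument parameters that the public
// header declares for the Path constructor.

inline void pathExample()
{
   Json::Value root;
   root["book"]["title"] = "intro";

   Json::Path titlePath( ".book.title" );
   std::string title = titlePath.resolve( root, Json::Value( "" ) ).asString();   // "intro"

   Json::Path keyed( ".book.%", "title" );   // '%' consumes the "title" argument
   std::string same = keyed.resolve( root, Json::Value( "" ) ).asString();
   (void)title; (void)same;
}

// Editorial note: in resolve() above the index branch tests
// node->isValidIndex( arg.index_ ) where !node->isValidIndex(...) was
// presumably intended, so "[n]" segments report an error (or return the
// default) even when the index is in range.
// ---------------------------------------------------------------------------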
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
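// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the deleted file above): iteration
// works uniformly over objects and arrays; key() yields the member name for
// objects and the numeric index for arrays.  Assumes the const_iterator
// wrappers declared in the public header (operator*, operator++, key()) that
// forward to the deref()/increment()/key() methods shown here.

inline void iterationExample( const Json::Value &root )
{
   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
   {
      Json::Value key = it.key();          // string for objects, index for arrays
      const Json::Value &element = *it;
      (void)key; (void)element;
   }
}
// ---------------------------------------------------------------------------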
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
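// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the deleted file above): uintToString()
// emits the least-significant digit first, so it fills a fixed buffer from
// the end and the caller returns the pointer to the first character written.
// Standalone copy of the technique with a hypothetical name:

#include <cassert>
#include <string>

inline std::string uintToStringSketch( unsigned value )
{
   char buffer[32];
   char *current = buffer + sizeof(buffer);
   *--current = 0;                              // terminating NUL
   do
   {
      *--current = char( (value % 10) + '0' );  // next digit, right to left
      value /= 10;
   }
   while ( value != 0 );
   return current;                              // first digit (or "0")
}

inline void uintToStringExample()
{
   assert( uintToStringSketch( 0 ) == "0" );
   assert( uintToStringSketch( 4294967295u ) == "4294967295" );
}
// ---------------------------------------------------------------------------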
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
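// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the deleted file above):
// FastWriter::write() produces the whole document on a single line;
// enableYAMLCompatibility() only adds a space after ':' so the output is
// also readable as YAML.  Hypothetical caller, assuming the public headers
// are included:

inline void fastWriterExample()
{
   Json::Value root;
   root["name"] = "json";
   root["tags"][0u] = "c++";

   Json::FastWriter writer;
   std::string compact = writer.write( root );
   // e.g. {"name":"json","tags":["c++"]} followed by a newline
   (void)compact;
}
// ---------------------------------------------------------------------------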
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include <windows.h> -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; } - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext.
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targeted at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque<Failure> Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test cases in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template<typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true.
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include <json/json.h> -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range.
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 86758a4b10b0ab4d8aff33f6f4e2055bc8a932b4 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:34:06 +0000 Subject: [PATCH 120/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@120 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 166 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 344 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14496 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integers, real numbers, strings, an ordered sequence of +values, and a collection of name/value pairs. + +JsonCpp is a simple API for manipulating JSON values and for handling +serialization to and deserialization from strings. + +It can also preserve existing comments across the deserialization/serialization steps, +making it a convenient format for storing user input files. + +Deserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses SCons (http://www.scons.org) as its build system. SCons requires +Python to be installed (http://www.python.org). + +You can download the scons-local distribution from the following URL: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py should be +at the same level as this README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (Linux, also reported to work for Mac OS X) + +Adding a platform is fairly simple: you need to change the SConstruct file +to do so. + +and TARGET may be: + check: build the library and run the unit tests. + + +* Running the tests manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using the JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the Python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flattened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element, separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represents the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element paths, and the small example below. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated alongside the input test files.
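For example, a (hypothetical) test case test_example.json containing:

   { "name": "json", "versions": [1, 2] }

would use the following test_example.expected, per the format described in the previous section:

   .={}
   .name="json"
   .versions=[]
   .versions[0]=1
   .versions[1]=2

And, as a rough sketch of what jsontest.exe does when it produces the .rewrite file (this is not the actual src/jsontestrunner/main.cpp source, only an illustration written against the 0.5.0 public API; the file names are made up):

   #include <json/json.h>
   #include <fstream>
   #include <sstream>
   #include <iostream>

   int main()
   {
      // Read the whole input document into a string.
      std::ifstream in( "test_example.json" );
      std::stringstream buffer;
      buffer << in.rdbuf();

      // Parse it into a Json::Value tree.
      Json::Reader reader;
      Json::Value root;
      if ( !reader.parse( buffer.str(), root ) )
      {
         std::cerr << "Parsing failed" << std::endl;
         return 1;
      }

      // Serialize the parsed tree back to JSON, as the .rewrite file.
      Json::StyledWriter writer;
      std::ofstream out( "test_example.rewrite" );
      out << writer.write( root );
      return 0;
   }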
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was correct. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWriter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +- test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing errors. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +"""
+Notes:
+- shared library support is buggy: it assumes that a static and a dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time.
+
+To add a platform:
+- add its name in options allowed_values below
+- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example.
+"""
+
+import os
+import os.path
+import sys
+
+JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip()
+DIST_DIR = '#dist'
+
+options = Variables()
+options.Add( EnumVariable('platform',
+ 'Platform (compiler/stl) used to build the project',
+ 'msvc71',
+ allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(),
+ ignorecase=2) )
+
+try:
+ platform = ARGUMENTS['platform']
+ if platform == 'linux-gcc':
+ CXX = 'g++' # not quite right, but env is not yet available.
+ import commands
+ version = commands.getoutput('%s -dumpversion' %CXX)
+ platform = 'linux-gcc-%s' %version
+ print "Using platform '%s'" %platform
+ LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
+ LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
+ os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
+ print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
+except KeyError:
+ print 'You must specify a "platform"'
+ sys.exit(2)
+
+print "Building using PLATFORM =", platform
+
+rootbuild_dir = Dir('#buildscons')
+build_dir = os.path.join( '#buildscons', platform )
+bin_dir = os.path.join( '#bin', platform )
+lib_dir = os.path.join( '#libs', platform )
+sconsign_dir_path = Dir(build_dir).abspath
+sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' )
+
+# Ensure the build directory exists (SConsignFile fails otherwise!)
+if not os.path.exists( sconsign_dir_path ):
+ os.makedirs( sconsign_dir_path )
+
+# Store all dependency signatures in a database
+SConsignFile( sconsign_path )
+
+def make_environ_vars():
+ """Returns a dictionary with the environment variables to use when compiling."""
+ # PATH is required to find the compiler
+ # TEMP is required for at least mingw
+ vars = {}
+ for name in ('PATH', 'TEMP', 'TMP'):
+ if name in os.environ:
+ vars[name] = os.environ[name]
+ return vars
+
+
+env = Environment( ENV = make_environ_vars(),
+ toolpath = ['scons-tools'],
+ tools=[] ) #, tools=['default'] )
+
+if platform == 'suncc':
+ env.Tool( 'sunc++' )
+ env.Tool( 'sunlink' )
+ env.Tool( 'sunar' )
+ env.Append( CCFLAGS = ['-mt'] )
+elif platform == 'vacpp':
+ env.Tool( 'default' )
+ env.Tool( 'aixcc' )
+ env['CXX'] = 'xlC_r' # scons does not pick up the correct one!
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox new file mode 100644 index 0000000..fc7b530 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox @@ -0,0 +1,97 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represents integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space" = true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- rewrite JSON document preserving original comments + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormatedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitely construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _plinks Build instructions +The build instruction are located in the file +README.txt in the top-directory of the project. + +Permanent link to the lastest revision of the file in subversion: +lastest README.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +The json-cpp library and this documentation are in Public Domain. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org). 
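The jsoncpp.dox above already shows the basic parse/serialize round trip; the roadmap that follows plans a strict RFC 4627 mode, and the headers added later in this patch declare Json::Features::strictMode() together with Json::Value::getMemberNames() for object iteration. As a minimal sketch only (it is not part of the imported sources and simply assumes the Reader, Features and Value declarations from include/json/ below), strict parsing plus member iteration could look like:

\code
#include <json/json.h>
#include <iostream>
#include <string>

// Illustrative sketch: parse in strict mode (comments forbidden, root must be
// an array or an object) and print every top-level member of the result.
void dumpMembers( const std::string &document )
{
   Json::Reader reader( Json::Features::strictMode() );
   Json::Value root;
   if ( !reader.parse( document, root ) )
   {
      std::cout << "Parse failed:\n" << reader.getFormatedErrorMessages();
      return;
   }
   Json::Value::Members members = root.getMemberNames();
   for ( Json::Value::Members::const_iterator it = members.begin();
         it != members.end(); ++it )
      std::cout << *it << " = " << root[*it];   // operator<<() uses StyledStreamWriter
}
\endcode

Note that in strict mode the C/C++ comments used in the configuration example above would be rejected, so this sketch only suits plain RFC 4627 documents.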
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox new file mode 100644 index 0000000..7f3aa1a --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox @@ -0,0 +1,32 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + - Release on sourceforge download + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - "Stream" based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py new file mode 100644 index 0000000..5078fbc --- /dev/null +++ b/tags/jsoncpp/0.5.0/doxybuild.py @@ -0,0 +1,166 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'build/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_path + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
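+    // Illustrative note (not part of the original header): with itemsPerPage == 8,
+    // itemIndex 19 resolves to pages_[19 / 8][19 % 8], i.e. pages_[2][3].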
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
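+ *
+ * A minimal usage sketch (illustrative only, assuming \c root already holds the value to serialize):
+ * \code
+ *   Json::Value root;
+ *   root["name"] = "example";
+ *   Json::StyledStreamWriter writer( "  " );  // indent each level with two spaces
+ *   writer.write( std::cout, root );          // pretty-prints root to the stream
+ * \endcode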
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..80a2edb --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,344 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
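+ # VS solution/project files are forced to CRLF; all other distributed sources are forced to LF.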
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
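+ Existing files in htdocs that are not part of doc_dir are removed before the upload.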
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store', default=False, + help="""Skips build check.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' 
) + release_version = args[0] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentatio upload' + else: + print 'No upload user specified. Documentation was not upload.' + print 'Tarball can be found at:', doc_tarball_path + #@todo: + #upload source & doc tarballs + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. 
+ + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
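+ // the free list is intrusive: each released object stores the pointer to the next free object in its own storage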
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
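+ // explicitly destroy the element; its page memory is released after this loop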
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
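+ * (DefaultValueMapAllocator::allocateMapLink() relies on this by zeroing recycled links with memset.)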
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
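// [Editorial note, not part of the original patch] The definitions that follow
// implement the public parsing interface of Json::Reader. A minimal usage
// sketch (illustrative only; it assumes the headers are installed so that
// <json/json.h> pulls in the Reader and Value declarations):
//
//   #include <json/json.h>
//   #include <iostream>
//
//   Json::Value root;
//   Json::Reader reader;                          // default Features::all()
//   bool ok = reader.parse( "{ \"answer\" : 42 }", root );
//   if ( ok )
//     std::cout << root["answer"].asInt();        // prints 42
//   else
//     std::cout << reader.getFormatedErrorMessages();
//
// A Reader constructed with Features::strictMode() additionally rejects
// comments and any root that is not an array or object, as parse() below shows.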
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
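+   // (Typical call sites, for illustration only: construct a StyledStreamWriter
+   //  and call write( std::cout, root ), or rely on the operator<< overload
+   //  defined at the end of this file, which does exactly that.)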
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
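+// A minimal usage sketch of the JsonTest fixture macros declared in jsontest.h
+// above (illustrative only -- "SampleFixture" and "twoPlusTwo" are placeholder
+// names, not part of this import):
+//
+//   struct SampleFixture : JsonTest::TestCase { };      // shared state goes here
+//
+//   JSONTEST_FIXTURE( SampleFixture, twoPlusTwo )       // defines the test case body
+//   {
+//      JSONTEST_ASSERT_EQUAL( 4, 2 + 2 );               // non-aborting assertion
+//   }
+//
+//   // in main(): JSONTEST_REGISTER_FIXTURE( runner, SampleFixture, twoPlusTwo );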
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From ec99067a1fedec4b03498deae962edcc3c77f70b Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:36:32 +0000 Subject: [PATCH 121/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@121 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 166 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 344 --- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 
5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - 
.../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14496 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
-It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. 
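As an illustration of the flattened TESTNAME.expected format described in the section above, here is a minimal sketch, assuming only the element-path conventions just stated ('.' for the root, '.name' for object members, '[N]' for array indices, and empty {} / [] for container values). The flatten helper and the sample document are illustrative only and are not part of this patch or of the test suite; the exact value formatting and root-path spelling emitted by jsontest.exe may differ.

    def flatten(value, path='.'):
        # Emit one 'path=value' line per element of the document tree.
        lines = []
        if isinstance(value, dict):
            lines.append(path + '={}')      # object values are shown empty
            for name in sorted(value):
                child = path + name if path == '.' else path + '.' + name
                lines.extend(flatten(value[name], child))
        elif isinstance(value, list):
            lines.append(path + '=[]')      # array values are shown empty
            for index, item in enumerate(value):
                lines.extend(flatten(item, '%s[%d]' % (path, index)))
        else:
            lines.append('%s=%s' % (path, value))
        return lines

    print('\n'.join(flatten({'count': 3, 'items': ['a', 'b']})))

For this hypothetical input the sketch prints one line per element, for example:

    .={}
    .count=3
    .items=[]
    .items[0]=a
    .items[1]=b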
-Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. 
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
-[footer.html body: markup lost in extraction; recoverable text: SourceForge logo image, "hosts this site.", and a "Send comments to: Json-cpp Developers" mail link]
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@
-[header.html body: markup lost in extraction; recoverable text: page title "JsonCpp - JSON data format manipulation library" and the links "JsonCpp project page" and "JsonCpp home page"]
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
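The jsoncpp.dox above and the headers deleted further down in this diff (features.h, reader.h, value.h) describe the 0.5.0 parsing API. As a minimal sketch of how Json::Features::strictMode() combines with the Reader( const Features & ) constructor and getFormatedErrorMessages() — the include path and the sample JSON document here are illustrative assumptions, not taken from the patch:

    #include <json/json.h>   // aggregates value.h, reader.h, writer.h, features.h
    #include <iostream>
    #include <string>

    int main()
    {
       // Strict mode: comments are forbidden and the root must be an array or an object.
       Json::Features features = Json::Features::strictMode();
       Json::Reader reader( features );

       const std::string doc = "{ \"plug-ins\" : [ \"python\", \"ruby\" ] }";
       Json::Value root;
       if ( !reader.parse( doc, root ) )
       {
          // Formatted list of parse errors with their location in the document.
          std::cerr << "Failed to parse:\n" << reader.getFormatedErrorMessages();
          return 1;
       }

       const Json::Value plugins = root["plug-ins"];
       for ( Json::Value::UInt index = 0; index < plugins.size(); ++index )
          std::cout << plugins[index].asString() << std::endl;
       return 0;
    }

Parsing the same text with the default Reader() (equivalent to Features::all()) would additionally accept C/C++-style comments and any JSON value as the root.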
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 5078fbc..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,166 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'build/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_path - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
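 *
 * For illustration: with ValueInternalLink::itemPerLink == 6, a bucket holding 9 values
 * chains two links; the first link is full and the second holds 3 used slots and 3
 * 'available' ones, consistent with the rule above that only the last link of a bucket
 * may contain available items.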
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
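      // For illustration of the look-up arithmetic described above: with
      // itemsPerPage == 8, item index 19 is found at
      // pages_[19 / 8][19 % 8], i.e. pages_[2][3].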
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
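 * From user code the iterator is normally obtained through Value::begin()/end().
 * An illustrative loop (names are arbitrary, calls are those declared above):
 * \code
 * for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
 * {
 *    const char *name = it.memberName();   // "" when iterating an arrayValue
 *    const Json::Value &element = *it;
 * }
 * \endcode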
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
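 *
 * Illustrative use (the stream, indentation and value below are arbitrary):
 * \code
 * Json::Value root;
 * root["name"] = "json";
 * Json::StyledStreamWriter writer( "  " );   // two-space indentation per level
 * writer.write( std::cout, root );
 * \endcode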
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ [vcproj XML: 119 deleted lines not preserved in this export]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ [vcproj XML: 214 deleted lines not preserved in this export]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ [vcproj XML: 130 deleted lines not preserved in this export]
diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index 80a2edb..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,344 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs.
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. [Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store', default=False, - help="""Skips build check.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' 
) - release_version = args[0] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentatio upload' - else: - print 'No upload user specified. Documentation was not upload.' - print 'Tarball can be found at:', doc_tarball_path - #@todo: - #upload source & doc tarballs - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. 
- - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
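The batch allocator's doc comment above leaves the construction contract implicit: allocate() hands back raw storage only. A minimal usage sketch, with a hypothetical Node element type and the template arguments assumed to be the element type plus the objects-per-allocation count (the export stripped the angle-bracket parameters), could look like:

    #include <new>   // placement new

    struct Node { int payload; };   // hypothetical element type

    void batchAllocatorSketch()
    {
        Json::BatchAllocator<Node, 1> allocator;   // assumed template parameters

        Node *node = allocator.allocate();   // raw storage from the current page
        new ( node ) Node();                 // caller constructs in place
        node->payload = 42;

        node->~Node();                       // caller destructs explicitly
        allocator.release( node );           // storage returns to the free list
    }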
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
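The arrayAllocator() accessor together with the dummy initializer above is a small replace-the-default-singleton idiom, forced to exist before main(). Reduced to generic, purely illustrative names, the same pattern reads:

    struct Allocator { virtual ~Allocator() {} };
    struct DefaultAllocator : Allocator {};

    // Function-local statics are created on first call, so every caller sees a
    // valid pointer and client code may overwrite the slot with its own allocator.
    static Allocator *&allocatorSlot()
    {
        static DefaultAllocator defaultAllocator;
        static Allocator *current = &defaultAllocator;
        return current;
    }

    // File-scope dummy: its constructor runs during static initialization and
    // touches the slot, so the statics above exist before main() begins.
    static struct ForceAllocatorInit
    {
        ForceAllocatorInit() { allocatorSlot(); }
    } forceAllocatorInit;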
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
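Element addressing in ValueInternalArray splits a flat index into a page number and an offset within the page; it is the same arithmetic used by makeIterator() and resolveReference(). A small illustrative helper, assuming itemsPerPage is 8 purely for the worked numbers:

    // Hypothetical helper mirroring pages_[index / itemsPerPage][index % itemsPerPage].
    static Json::Value &elementAt( Json::Value **pages, unsigned index, unsigned itemsPerPage )
    {
        // e.g. index 19 with itemsPerPage == 8 lands in pages[2][3]
        return pages[index / itemsPerPage][index % itemsPerPage];
    }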
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
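As a quick sanity check of codePointToUTF8() above, the byte sequences it should produce for a few representative code points are listed below; the supplementary-plane value is the one the surrogate-pair handling later in this file reconstructs as 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF). This is a test-style sketch, not part of the library:

    #include <cassert>

    static void utf8SpotCheck()
    {
        assert( codePointToUTF8( 0x0024 )  == "\x24" );              // 1-byte form, '$'
        assert( codePointToUTF8( 0x00E9 )  == "\xC3\xA9" );          // 2-byte form, e-acute
        assert( codePointToUTF8( 0x20AC )  == "\xE2\x82\xAC" );      // 3-byte form, euro sign
        assert( codePointToUTF8( 0x1F600 ) == "\xF0\x9F\x98\x80" );  // 4-byte form; \uD83D \uDE00 decodes to U+1F600
    }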
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
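The parse() overloads above are the reader's public entry points. A minimal caller sketch in the usual jsoncpp 0.x style, similar to what src/jsontestrunner/main.cpp in this same patch does:

    #include <cstdio>
    #include <string>
    #include <json/json.h>

    bool parseDocument( const std::string &document, Json::Value &root )
    {
        Json::Reader reader;                     // Features::all(): comments allowed
        if ( !reader.parse( document, root ) )   // collectComments defaults to true
        {
            printf( "%s\n", reader.getFormatedErrorMessages().c_str() );
            return false;
        }
        return true;
    }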
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
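Worked out for the 32-bit Int/UInt this version of the library assumes (and on the usual two's-complement targets), the bit-twiddled limits defined near the top of this file come to:

    // UInt(-1)          == 0xFFFFFFFF == 4294967295  -> maxUInt
    // UInt(-1) / 2      == 0x7FFFFFFF == 2147483647  -> maxInt
    // ~( UInt(-1) / 2 ) == 0x80000000, reinterpreted by Int(...) as -2147483648 -> minInt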
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
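
A short sketch (not from the patch) of how the Path helper deleted around here is used. The ".key" syntax mirrors the makePath() parser above; because the isValidIndex() test in the two-argument resolve() further down looks inverted, the sketch sticks to key arguments:

#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value root;
   root["project"]["name"] = "jsoncpp";

   // ".project.name" is parsed by Path::makePath(): '.' separates keys,
   // '[' would introduce an array index, '%' a substituted PathArgument.
   Json::Path path( ".project.name" );
   std::cout << path.resolve( root, Json::Value( "unknown" ) ).asString() << std::endl;
   return 0;
}
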
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
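
A small sketch (illustrative, not part of the patch) of the two ways to enumerate an object with the value and iterator code in the hunks above, getMemberNames() and the ValueIterator family:

#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value obj;
   obj["a"] = 1;
   obj["b"] = true;

   // 1) Copy the member names, then index back into the object.
   Json::Value::Members names = obj.getMemberNames();
   for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
      std::cout << *it << " = " << obj[*it].toStyledString();

   // 2) Walk the object directly; memberName() comes from ValueIteratorBase.
   for ( Json::Value::iterator it = obj.begin(); it != obj.end(); ++it )
      std::cout << it.memberName() << std::endl;
   return 0;
}
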
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
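
For comparison, a compact sketch (illustrative only) of the three writers whose implementations are deleted in this hunk: FastWriter for single-line output, StyledWriter returning an indented string, and StyledStreamWriter writing straight to a stream:

#include <json/json.h>
#include <iostream>

int main()
{
   Json::Value root;
   root["name"] = "jsoncpp";
   root["sizes"].append( 1 );
   root["sizes"].append( 2 );

   Json::FastWriter fast;                    // single line, e.g. {"name":"jsoncpp","sizes":[1,2]}
   std::cout << fast.write( root );

   Json::StyledWriter styled;                // indented with 3 spaces (indentSize_ above)
   std::cout << styled.write( root );

   Json::StyledStreamWriter writer( "  " );  // indentation string is configurable
   writer.write( std::cout, root );
   return 0;
}
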
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
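
A tiny sketch (not from the patch) of the TestResult interface implemented later in this jsontest.cpp hunk, using only the members visible here: setTestName(), addFailure(), failed() and printFailure():

#include "jsontest.h"

int main()
{
   JsonTest::TestResult result;
   result.setTestName( "demo" );

   const int answer = 1 + 1;
   if ( answer != 2 )
      result.addFailure( __FILE__, __LINE__, "answer == 2" );   // records file, line and expression

   if ( result.failed() )
   {
      result.printFailure( true );   // prints "* Detail of demo test failure:" plus each failure
      return 1;
   }
   return 0;
}
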
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
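The fixture and assertion macros above are what the test cases in main.cpp build on. A short hypothetical sketch of how they compose, assuming the real jsontest.h declarations; the fixture name MathTest and the helper checkNonNegative are invented for illustration and are not part of the library:

struct MathTest : JsonTest::TestCase
{
    // Helper predicate: it may contain assertions of its own and is meant to
    // be invoked through JSONTEST_ASSERT_PRED so failures keep a callstack.
    void checkNonNegative( int x )
    {
        JSONTEST_ASSERT_EQUAL( true, x >= 0 );
    }
};

JSONTEST_FIXTURE( MathTest, squares )
{
    int values[] = { 1, -2, 3 };
    for ( int index = 0; index < 3; ++index )
    {
        // If the inner assertion fails, the report lists this call site as
        // the enclosing PredicateContext, giving a nested failure trace.
        JSONTEST_ASSERT_PRED( checkNonNegative( values[index] * values[index] ) );
    }
}

Registering the fixture with JSONTEST_REGISTER_FIXTURE( runner, MathTest, squares ) and calling runner.runCommandLine( argc, argv ) then runs it, exactly as the main.cpp below does for the ValueTest fixtures.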
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* <!-- --", - "# -- --> */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "&#34; \u0022 %22 0x22 034 &#x22;", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '<File "%s" is missing: %s>' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 6f97d14b34939039e6dd0caa2508381b355b3f63 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:37:29 +0000 Subject: [PATCH 122/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@122 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 166 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 344 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14496 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. 
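As a rough illustration of the flattened element-tree format described in the previous section (the sample document and values here are invented, not taken from the test suite), an input document such as

   {"count": 3, "name": "jsoncpp", "tags": ["json", "c++"]}

would flatten to something like:

   .={}
   .count=3
   .name="jsoncpp"
   .tags=[]
   .tags[0]="json"
   .tags[1]="c++"

A minimal Python sketch of this flattening, assuming the document has already been parsed into plain dicts and lists (the helper name is made up for illustration; the real tooling for this lives in test/pyjsontestrunner.py and the jsontest executable):

   def flatten(value, path='.', out=None):
       # Objects and arrays print as {} / [], object members are reached
       # with '.', array elements with [N]; keys are emitted in sorted order.
       out = [] if out is None else out
       if isinstance(value, dict):
           out.append('%s={}' % path)
           sep = '' if path.endswith('.') else '.'
           for name in sorted(value):
               flatten(value[name], path + sep + name, out)
       elif isinstance(value, list):
           out.append('%s=[]' % path)
           for index, child in enumerate(value):
               flatten(child, '%s[%d]' % (path, index), out)
       elif isinstance(value, bool):
           out.append('%s=%s' % (path, 'true' if value else 'false'))
       elif isinstance(value, str):
           out.append('%s="%s"' % (path, value))
       elif isinstance(value, int):
           out.append('%s=%d' % (path, value))
       elif isinstance(value, float):
           out.append('%s=%.16g' % (path, value))
       elif value is None:
           out.append('%s=null' % path)
       return out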
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
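For orientation, a typical call to the glob() function defined further down in this module looks roughly like the (commented-out) usage sketched in devtools/fixeol.py; the patterns and directory names below are illustrative only, not part of the build scripts:

   import antglob
   # Collect C++ sources anywhere under the current tree; version-control
   # and build directories are pruned, and '**/*.cpp'-style ant patterns
   # match at any depth.
   cpp_sources = antglob.glob( '.',
                               includes = '**/*.cpp **/*.h **/*.inl',
                               prune_dirs = antglob.prune_dirs + ' build' )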
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
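The doxyfile earlier in this patch enables JAVADOC_AUTOBRIEF and defines custom ALIASES such as \json_ref. As a quick illustration of how those settings are consumed, here is a minimal sketch of a documented declaration; the function parseDocument() is hypothetical and not taken from the jsoncpp headers, it only shows the comment style that configuration expects.

// Hypothetical declaration, shown only to illustrate the doxyfile settings.
/** Parses a \json_ref document held in a string.
 *
 * With JAVADOC_AUTOBRIEF = YES the first sentence (up to the first dot)
 * becomes the brief description, and the \json_ref alias expands to
 * "JSON (JavaScript Object Notation)". The remaining lines form the
 * detailed description.
 */
bool parseDocument( const std::string &document );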
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox new file mode 100644 index 0000000..fc7b530 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox @@ -0,0 +1,97 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of values, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space" : true } +} +\endverbatim + +\section _features Features +- read and write JSON documents +- rewrite a JSON document preserving its original comments + +\code +Json::Value root; // will contain the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and its location in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormatedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructors for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +The json-cpp library and this documentation are in Public Domain. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org).
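The main page above demonstrates parsing an existing configuration document. As a companion, here is a minimal, self-contained sketch of the write-then-reparse round trip described under Features; it assumes only the API already shown in jsoncpp.dox (Json::Value with its implicit constructors, Json::Reader, Json::StyledWriter) and the umbrella header include/json/json.h, and the program itself is illustrative rather than part of the source tree.

#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Build a document programmatically; Json::Value has implicit
   // constructors for strings, integers and booleans.
   Json::Value root;
   root["encoding"] = "UTF-8";
   root["indent"]["length"] = 3;
   root["indent"]["use_space"] = true;

   // Serialize it with human-readable indentation.
   Json::StyledWriter writer;
   std::string document = writer.write( root );
   std::cout << document;

   // Parse the text back and read one value, falling back to a default.
   Json::Value parsed;
   Json::Reader reader;
   if ( !reader.parse( document, parsed ) )
   {
      std::cout << "Failed to parse: " << reader.getFormatedErrorMessages();
      return 1;
   }
   std::cout << parsed["indent"].get( "length", 3 ).asInt() << std::endl;
   return 0;
}

Writing the same tree with Json::FastWriter instead of Json::StyledWriter would produce a compact, single-line document, which is usually preferable for machine-to-machine exchange.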
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+ \section ms_release Make JsonCpp ready for release
+ - Build system clean-up:
+   - Fix build on Windows (shared-library build is broken)
+   - Add enable/disable flag for static and shared library build
+   - Enhance help
+ - Platform portability check: (Notes: was ok on last check)
+   - linux/gcc,
+   - solaris/cc,
+   - windows/msvc678,
+   - aix/vacpp
+ - Add JsonCpp version to header as numeric for use in preprocessor tests
+ - Remove buggy experimental hash stuff
+ - Release on sourceforge download
+ \section ms_strict Add a strict mode to reader/parser
+ Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+ - Enforce only object or array as root element
+ - Disable comment support
+ - Get jsonchecker failing tests to pass in strict mode
+ \section ms_separation Expose a json reader/writer API that does not impose using Json::Value.
+ Some typical use-cases involve converting an application-specific structure to/from a JSON document.
+ - Event-based parser to allow unserializing a JSON document directly into a data structure instead of
+   going through the intermediate Json::Value.
+ - "Stream" based writer to serialize a JSON document without using Json::Value as input.
+ - Performance oriented parser/writer:
+   - Provide an event-based parser. Should allow pulling & skipping events for ease of use.
+   - Provide a JSON document builder: fast only.
+ \section ms_perfo Performance tuning
+ - Provide support for static property name definitions, avoiding allocation
+ - A static property dictionary can be provided to the JSON reader
+ - Performance scenario & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..8856765
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,166 @@
+"""Script to generate doxygen documentation.
+"""
+
+import re
+import os
+import os.path
+import sys
+import shutil
+from devtools import tarball
+
+def find_program(*filenames):
+    """Find a program on the PATH.
+    @param filenames: a list of possible names of the program to search for
+    @return: the full path of the program if found, or '' if it could not be found
+"""
+    paths = os.environ.get('PATH', '').split(os.pathsep)
+    suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
+    for filename in filenames:
+        for name in [filename+ext for ext in suffixes.split()] + [filename]:  # try the bare name too, so lookup also works on non-Windows platforms
+            for directory in paths:
+                full_path = os.path.join(directory, name)
+                if os.path.isfile(full_path):
+                    return full_path
+    return ''
+
+def do_subst_in_file(targetfile, sourcefile, dict):
+    """Replace all instances of the keys of dict with their values.
+    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
+    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..80a2edb --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,344 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store', default=False, + help="""Skips build check.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' 
) + release_version = args[0] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentatio upload' + else: + print 'No upload user specified. Documentation was not upload.' + print 'Tarball can be found at:', doc_tarball_path + #@todo: + #upload source & doc tarballs + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. 
+ + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
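The BatchAllocator contract described in the class comment above (allocate() hands back raw storage, the caller constructs with placement new, destroys explicitly, and release() returns the slot to the free list) can be illustrated with a stand-alone sketch. The template arguments and the include path are assumptions inferred from how the members above use AllocatedType and objectPerAllocation; the allocated type must be at least pointer-sized so the free list can live in freed slots, as the constructor's assert requires.

    #include <cassert>
    #include <new>
    #include "json_batchallocator.h"   // assumed include path for this sketch

    struct Node
    {
       double value;                    // at least pointer-sized, as the allocator's assert requires
       explicit Node( double v ) : value( v ) {}
    };

    int main()
    {
       // One Node per allocation; the default of 255 objects per page.
       Json::BatchAllocator<Node, 1> allocator;

       Node *n = allocator.allocate();  // raw storage only, no constructor has run
       new ( n ) Node( 42.0 );          // caller constructs with placement new
       assert( n->value == 42.0 );
       n->~Node();                      // caller destroys explicitly
       allocator.release( n );          // slot goes back on the free list for reuse
       return 0;
    }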
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
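ValueInternalArray, which the .inl above completes, stores its elements in fixed-size pages and locates an element by splitting the index into a page number (index / itemsPerPage) and an offset within that page (index % itemsPerPage), exactly as resolveReference() and find() do. The stand-alone sketch below repeats that arithmetic with a placeholder page size; kItemsPerPage is illustrative and is not the library's actual itemsPerPage constant.

    #include <cassert>
    #include <cstddef>
    #include <vector>

    static const std::size_t kItemsPerPage = 8;       // placeholder page size, for illustration only

    struct PagedArray
    {
       std::vector<int*> pages;                        // page index: one fixed-size page per slot

       ~PagedArray()
       {
          for ( std::size_t i = 0; i < pages.size(); ++i )
             delete[] pages[i];
       }

       int &at( std::size_t index )
       {
          std::size_t page   = index / kItemsPerPage;  // which page holds the element
          std::size_t offset = index % kItemsPerPage;  // slot within that page
          if ( page >= pages.size() )
             pages.resize( page + 1, static_cast<int*>( 0 ) );
          if ( !pages[page] )
             pages[page] = new int[kItemsPerPage]();   // pages are allocated lazily, one at a time
          return pages[page][offset];
       }
    };

    int main()
    {
       PagedArray a;
       a.at( 0 )  = 1;
       a.at( 17 ) = 2;                                 // page 2, slot 1 when the page size is 8
       assert( a.at( 17 ) == 2 );
       return 0;
    }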
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
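ValueInternalMap resolves a member name to a bucket with a very small string hash: each character is weighted by 37 and summed (see hash() further down), and the result is reduced modulo bucketsSize_, as in find() and resolveReference() here. Note that reserve() above only ever creates a single bucket in this version, so every member lands in bucket 0 and lookups walk that bucket's chain of links. A stand-alone sketch of the bucket selection, with an assumed bucket count, follows.

    #include <cassert>

    typedef unsigned int HashKey;

    // Same additive hash as ValueInternalMap::hash(): characters weighted by 37.
    static HashKey hashString( const char *key )
    {
       HashKey hash = 0;
       while ( *key )
          hash += *key++ * 37;
       return hash;
    }

    int main()
    {
       const unsigned int bucketsSize = 1;             // this version only ever allocates one bucket
       unsigned int bucket = hashString( "name" ) % bucketsSize;
       assert( bucket == 0 );
       return 0;
    }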
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
+   BucketIndex bucketIndex = hashedKey % bucketsSize_;
+   ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex );
+   ValueInternalLink *link = previousLink;
+   BucketIndex index;
+   for ( index = 0; index < ValueInternalLink::itemPerLink; ++index )
+   {
+      if ( link->items_[index].isItemAvailable() )
+         break;
+   }
+   if ( index == ValueInternalLink::itemPerLink ) // need to add a new page
+   {
+      ValueInternalLink *newLink = mapAllocator()->allocateMapLink();
+      index = 0;
+      link->next_ = newLink;
+      previousLink = newLink;
+      link = newLink;
+   }
+   return setNewItem( key, isStatic, link, index );
+}
+
+
+ValueInternalMap::HashKey
+ValueInternalMap::hash( const char *key ) const
+{
+   HashKey hash = 0;
+   while ( *key )
+      hash += *key++ * 37;
+   return hash;
+}
+
+
+int
+ValueInternalMap::compare( const ValueInternalMap &other ) const
+{
+   int sizeDiff( itemCount_ - other.itemCount_ );
+   if ( sizeDiff != 0 )
+      return sizeDiff;
+   // A strict order guarantee is required. Compare all keys FIRST, then compare values.
+   IteratorState it;
+   IteratorState itEnd;
+   makeBeginIterator( it );
+   makeEndIterator( itEnd );
+   for ( ; !equals(it,itEnd); increment(it) )
+   {
+      if ( !other.find( key( it ) ) )
+         return 1;
+   }
+
+   // All keys are equal, let's compare values
+   makeBeginIterator( it );
+   for ( ; !equals(it,itEnd); increment(it) )
+   {
+      const Value *otherValue = other.find( key( it ) );
+      int valueDiff = value(it).compare( *otherValue );
+      if ( valueDiff != 0 )
+         return valueDiff;
+   }
+   return 0;
+}
+
+
+void
+ValueInternalMap::makeBeginIterator( IteratorState &it ) const
+{
+   it.map_ = const_cast<ValueInternalMap *>( this );
+   it.bucketIndex_ = 0;
+   it.itemIndex_ = 0;
+   it.link_ = buckets_;
+}
+
+
+void
+ValueInternalMap::makeEndIterator( IteratorState &it ) const
+{
+   it.map_ = const_cast<ValueInternalMap *>( this );
+   it.bucketIndex_ = bucketsSize_;
+   it.itemIndex_ = 0;
+   it.link_ = 0;
+}
+
+
+bool
+ValueInternalMap::equals( const IteratorState &x, const IteratorState &other )
+{
+   return x.map_ == other.map_
+          &&  x.bucketIndex_ == other.bucketIndex_
+          &&  x.link_ == other.link_
+          &&  x.itemIndex_ == other.itemIndex_;
+}
+
+
+void
+ValueInternalMap::incrementBucket( IteratorState &iterator )
+{
+   ++iterator.bucketIndex_;
+   JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_,
+                        "ValueInternalMap::increment(): attempting to iterate beyond end." );
+   if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ )
+      iterator.link_ = 0;
+   else
+      iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]);
+   iterator.itemIndex_ = 0;
+}
+
+
+void
+ValueInternalMap::increment( IteratorState &iterator )
+{
+   JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." );
+   ++iterator.itemIndex_;
+   if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink )
+   {
+      JSON_ASSERT_MESSAGE( iterator.link_ != 0,
+                           "ValueInternalMap::increment(): attempting to iterate beyond end." );
+      iterator.link_ = iterator.link_->next_;
+      if ( iterator.link_ == 0 )
+         incrementBucket( iterator );
+   }
+   else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() )
+   {
+      incrementBucket( iterator );
+   }
+}
+
+
+void
+ValueInternalMap::decrement( IteratorState &iterator )
+{
+   if ( iterator.itemIndex_ == 0 )
+   {
+      JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." );
+      if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] )
+      {
+         JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." );
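+         // At the head link of this bucket: step back to the previous bucket before
+         // moving on to the last slot of the previous link.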
+         --(iterator.bucketIndex_);
+      }
+      iterator.link_ = iterator.link_->previous_;
+      iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1;
+   }
+}
+
+
+const char *
+ValueInternalMap::key( const IteratorState &iterator )
+{
+   JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
+   return iterator.link_->keys_[iterator.itemIndex_];
+}
+
+const char *
+ValueInternalMap::key( const IteratorState &iterator, bool &isStatic )
+{
+   JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
+   isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic();
+   return iterator.link_->keys_[iterator.itemIndex_];
+}
+
+
+Value &
+ValueInternalMap::value( const IteratorState &iterator )
+{
+   JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
+   return iterator.link_->items_[iterator.itemIndex_];
+}
+
+
+int
+ValueInternalMap::distance( const IteratorState &x, const IteratorState &y )
+{
+   int offset = 0;
+   IteratorState it = x;
+   while ( !equals( it, y ) )
+   {
+      increment( it );
+      ++offset;
+   }
+   return offset;
+}
diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp
new file mode 100644
index 0000000..4eb2d11
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp
@@ -0,0 +1,885 @@
+#include <json/reader.h>
+#include <json/value.h>
+#include <utility>
+#include <cstdio>
+#include <cassert>
+#include <cstring>
+#include <iostream>
+#include <stdexcept>
+
+#if _MSC_VER >= 1400 // VC++ 8.0
+#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
+#endif
+
+namespace Json {
+
+// Implementation of class Features
+// ////////////////////////////////
+
+Features::Features()
+   : allowComments_( true )
+   , strictRoot_( false )
+{
+}
+
+
+Features
+Features::all()
+{
+   return Features();
+}
+
+
+Features
+Features::strictMode()
+{
+   Features features;
+   features.allowComments_ = false;
+   features.strictRoot_ = true;
+   return features;
+}
+
+// Implementation of class Reader
+// ////////////////////////////////
+
+
+static inline bool
+in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 )
+{
+   return c == c1 || c == c2 || c == c3 || c == c4;
+}
+
+static inline bool
+in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 )
+{
+   return c == c1 || c == c2 || c == c3 || c == c4 || c == c5;
+}
+
+
+static bool
+containsNewLine( Reader::Location begin,
+                 Reader::Location end )
+{
+   for ( ; begin < end; ++begin )
+      if ( *begin == '\n' || *begin == '\r' )
+         return true;
+   return false;
+}
+
+static std::string codePointToUTF8(unsigned int cp)
+{
+   std::string result;
+
+   // based on description from http://en.wikipedia.org/wiki/UTF-8
+
+   if (cp <= 0x7f)
+   {
+      result.resize(1);
+      result[0] = static_cast<char>(cp);
+   }
+   else if (cp <= 0x7FF)
+   {
+      result.resize(2);
+      result[1] = static_cast<char>(0x80 | (0x3f & cp));
+      result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
+   }
+   else if (cp <= 0xFFFF)
+   {
+      result.resize(3);
+      result[2] = static_cast<char>(0x80 | (0x3f & cp));
+      result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
+      result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
+   }
+   else if (cp <= 0x10FFFF)
+   {
+      result.resize(4);
+      result[3] = static_cast<char>(0x80 | (0x3f & cp));
+      result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
+      result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
+      result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
+   }
+
+   return result;
+}
+
+
+// Class Reader
+// //////////////////////////////////////////////////////////////////
+
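+// Reader turns JSON text into a Value tree: parse() sets up the input range and
+// comment collection, then readValue() dispatches on the next token, recursing
+// through readObject()/readArray() with the current target kept on nodes_.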
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index )
+   {
+      Char c = *current++;
+      unicode *= 16;
+      if ( c >= '0' && c <= '9' )
+         unicode += c - '0';
+      else if ( c >= 'a' && c <= 'f' )
+         unicode += c - 'a' + 10;
+      else if ( c >= 'A' && c <= 'F' )
+         unicode += c - 'A' + 10;
+      else
+         return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current );
+   }
+   return true;
+}
+
+
+bool
+Reader::addError( const std::string &message,
+                  Token &token,
+                  Location extra )
+{
+   ErrorInfo info;
+   info.token_ = token;
+   info.message_ = message;
+   info.extra_ = extra;
+   errors_.push_back( info );
+   return false;
+}
+
+
+bool
+Reader::recoverFromError( TokenType skipUntilToken )
+{
+   int errorCount = int(errors_.size());
+   Token skip;
+   while ( true )
+   {
+      if ( !readToken(skip) )
+         errors_.resize( errorCount ); // discard errors caused by recovery
+      if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream )
+         break;
+   }
+   errors_.resize( errorCount );
+   return false;
+}
+
+
+bool
+Reader::addErrorAndRecover( const std::string &message,
+                            Token &token,
+                            TokenType skipUntilToken )
+{
+   addError( message, token );
+   return recoverFromError( skipUntilToken );
+}
+
+
+Value &
+Reader::currentValue()
+{
+   return *(nodes_.top());
+}
+
+
+Reader::Char
+Reader::getNextChar()
+{
+   if ( current_ == end_ )
+      return 0;
+   return *current_++;
+}
+
+
+void
+Reader::getLocationLineAndColumn( Location location,
+                                  int &line,
+                                  int &column ) const
+{
+   Location current = begin_;
+   Location lastLineStart = current;
+   line = 0;
+   while ( current < location && current != end_ )
+   {
+      Char c = *current++;
+      if ( c == '\r' )
+      {
+         if ( *current == '\n' )
+            ++current;
+         lastLineStart = current;
+         ++line;
+      }
+      else if ( c == '\n' )
+      {
+         lastLineStart = current;
+         ++line;
+      }
+   }
+   // column & line start at 1
+   column = int(location - lastLineStart) + 1;
+   ++line;
+}
+
+
+std::string
+Reader::getLocationLineAndColumn( Location location ) const
+{
+   int line, column;
+   getLocationLineAndColumn( location, line, column );
+   char buffer[18+16+16+1];
+   sprintf( buffer, "Line %d, Column %d", line, column );
+   return buffer;
+}
+
+
+std::string
+Reader::getFormatedErrorMessages() const
+{
+   std::string formattedMessage;
+   for ( Errors::const_iterator itError = errors_.begin();
+         itError != errors_.end();
+         ++itError )
+   {
+      const ErrorInfo &error = *itError;
+      formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n";
+      formattedMessage += "  " + error.message_ + "\n";
+      if ( error.extra_ )
+         formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n";
+   }
+   return formattedMessage;
+}
+
+
+std::istream& operator>>( std::istream &sin, Value &root )
+{
+   Json::Reader reader;
+   bool ok = reader.parse(sin, root, true);
+   //JSON_ASSERT( ok );
+   if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages());
+   return sin;
+}
+
+
+} // namespace Json
diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp
new file mode 100644
index 0000000..573205f
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp
@@ -0,0 +1,1718 @@
+#include <iostream>
+#include <json/value.h>
+#include <json/writer.h>
+#include <utility>
+#include <stdexcept>
+#include <cstring>
+#include <cassert>
+#ifdef JSON_USE_CPPTL
+# include <cpptl/conststring.h>
+#endif
+#include <cstddef> // size_t
+#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
+# include "json_batchallocator.h"
+#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
+
+#define JSON_ASSERT_UNREACHABLE assert( false )
+#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw
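+// JSON_ASSERT aborts through assert(), which is compiled out when NDEBUG is defined;
+// JSON_ASSERT_MESSAGE below instead reports the failure by throwing std::runtime_error.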
+#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message );
+
+namespace Json {
+
+const Value Value::null;
+const Int Value::minInt = Int( ~(UInt(-1)/2) );
+const Int Value::maxInt = Int( UInt(-1)/2 );
+const UInt Value::maxUInt = UInt(-1);
+
+// A "safe" implementation of strdup. Allows a null pointer to be passed.
+// Also avoids a warning on msvc80.
+//
+//inline char *safeStringDup( const char *czstring )
+//{
+//   if ( czstring )
+//   {
+//      const size_t length = (unsigned int)( strlen(czstring) + 1 );
+//      char *newString = static_cast<char *>( malloc( length ) );
+//      memcpy( newString, czstring, length );
+//      return newString;
+//   }
+//   return 0;
+//}
+//
+//inline char *safeStringDup( const std::string &str )
+//{
+//   if ( !str.empty() )
+//   {
+//      const size_t length = str.length();
+//      char *newString = static_cast<char *>( malloc( length + 1 ) );
+//      memcpy( newString, str.c_str(), length );
+//      newString[length] = 0;
+//      return newString;
+//   }
+//   return 0;
+//}
+
+ValueAllocator::~ValueAllocator()
+{
+}
+
+class DefaultValueAllocator : public ValueAllocator
+{
+public:
+   virtual ~DefaultValueAllocator()
+   {
+   }
+
+   virtual char *makeMemberName( const char *memberName )
+   {
+      return duplicateStringValue( memberName );
+   }
+
+   virtual void releaseMemberName( char *memberName )
+   {
+      releaseStringValue( memberName );
+   }
+
+   virtual char *duplicateStringValue( const char *value,
+                                       unsigned int length = unknown )
+   {
+      //@todo investigate this old optimization
+      //if ( !value  ||  value[0] == 0 )
+      //   return 0;
+
+      if ( length == unknown )
+         length = (unsigned int)strlen(value);
+      char *newString = static_cast<char *>( malloc( length + 1 ) );
+      memcpy( newString, value, length );
+      newString[length] = 0;
+      return newString;
+   }
+
+   virtual void releaseStringValue( char *value )
+   {
+      if ( value )
+         free( value );
+   }
+};
+
+static ValueAllocator *&valueAllocator()
+{
+   static DefaultValueAllocator defaultAllocator;
+   static ValueAllocator *valueAllocator = &defaultAllocator;
+   return valueAllocator;
+}
+
+static struct DummyValueAllocatorInitializer {
+   DummyValueAllocatorInitializer()
+   {
+      valueAllocator(); // ensure valueAllocator() statics are initialized before main().
+   }
+} dummyValueAllocatorInitializer;
+
+
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// ValueInternals...
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator &current ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator &current ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include <json/writer.h> +#include <utility> +#include <assert.h> +#include <stdio.h> +#include <string.h> +#include <iostream> +#include <sstream> +#include <iomanip> + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid </ + // sequence. + // Should add a flag to allow this compatibility mode and prevent this + // sequence from occurring. + default: + if ( isControlCharacter( *c ) ) + { + std::ostringstream oss; + oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ?
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include <stdio.h> +#include <string> + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include <crtdbg.h> +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault.
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From acd14191db42a2f82bc579808638372e2a047430 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:40:21 +0000 Subject: [PATCH 123/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@123 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 166 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 344 --- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 
5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - 
.../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14496 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
-It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. 
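[Editorial aside, not part of the patched sources: the README text above describes the flattened "path=value" convention used by the TESTNAME.expected files added elsewhere in this patch series. A minimal Python 3 sketch of that convention follows; the "flatten" helper name is invented for illustration and is not the project's jsontest runner, which is the C++ program also contained in this patch.]

import json

def flatten(value, path='.', out=None):
    # One "path=value" line per element; objects and arrays are printed
    # as "{}" / "[]", object members are joined with '.', array elements
    # use "[N]", mirroring the .expected format described above.
    if out is None:
        out = []
    if isinstance(value, dict):
        out.append('%s={}' % path)
        suffix = '' if path.endswith('.') else '.'
        for name in sorted(value):
            flatten(value[name], path + suffix + name, out)
    elif isinstance(value, list):
        out.append('%s=[]' % path)
        for index, child in enumerate(value):
            flatten(child, '%s[%d]' % (path, index), out)
    elif isinstance(value, str):
        out.append('%s="%s"' % (path, value))
    elif value is True:
        out.append('%s=true' % path)
    elif value is False:
        out.append('%s=false' % path)
    elif value is None:
        out.append('%s=null' % path)
    else:
        # ints and reals fall through here in this sketch
        out.append('%s=%s' % (path, value))
    return out

print('\n'.join(flatten(json.loads('{ "count" : 1234 }'))))
# prints, matching test_object_02.expected:
# .={}
# .count=1234

[End of editorial aside; the patched README continues below.]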
-Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. 
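# [Editorial aside -- a hedged usage sketch for the glob() helper defined further down
#  in this file. The calling module, directory layout and patterns are hypothetical;
#  they mirror the way the commented-out _do_fix() helper in devtools/fixeol.py
#  drives this function.]
#
#   from devtools import antglob
#   cpp_sources = antglob.glob( '.',
#                               includes = '**/*.cpp **/*.h **/*.inl',
#                               prune_dirs = antglob.prune_dirs + 'build dist' )
#   for path in cpp_sources:
#       print path    # only regular files kept by the include/exclude filters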
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000
--- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
-\mainpage
-\section _intro Introduction
-
-JSON (JavaScript Object Notation)
- is a lightweight data-interchange format.
-It can represent integers, real numbers, strings, an ordered sequence of values, and
-a collection of name/value pairs.
-
-Here is an example of JSON data:
-\verbatim
-// Configuration options
-{
-   // Default encoding for text
-   "encoding" : "UTF-8",
-
-   // Plug-ins loaded at start-up
-   "plug-ins" : [
-      "python",
-      "c++",
-      "ruby"
-      ],
-
-   // Tab indent size
-   "indent" : { "length" : 3, "use_space" : true }
-}
-\endverbatim
-
-\section _features Features
-- read and write JSON documents
-- rewrite JSON documents preserving original comments
-
-\code
-Json::Value root;   // will contain the root value after parsing.
-Json::Reader reader;
-bool parsingSuccessful = reader.parse( config_doc, root );
-if ( !parsingSuccessful )
-{
-    // report to the user the failure and its location in the document.
-    std::cout << "Failed to parse configuration\n"
-              << reader.getFormatedErrorMessages();
-    return;
-}
-
-// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
-// such member.
-std::string encoding = root.get("encoding", "UTF-8" ).asString();
-// Get the value of the member of root named 'plug-ins', return a 'null' value if
-// there is no such member.
-const Json::Value plugins = root["plug-ins"];
-for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
-   loadPlugIn( plugins[index].asString() );
-
-setIndentLength( root["indent"].get("length", 3).asInt() );
-setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
-
-// ...
-// At application shutdown, make the new configuration document:
-// Since Json::Value has implicit constructors for all value types, it is not
-// necessary to explicitly construct the Json::Value object:
-root["encoding"] = getCurrentEncoding();
-root["indent"]["length"] = getCurrentIndentLength();
-root["indent"]["use_space"] = getCurrentIndentUseSpace();
-
-Json::StyledWriter writer;
-// Make a new JSON document for the configuration. Preserve original comments.
-std::string outputConfig = writer.write( root );
-
-// You can also use streams. This will put the contents of any JSON
-// stream at a particular sub-value, if you'd like.
-std::cin >> root["subtree"];
-
-// And you can write to a stream, using the StyledWriter automatically.
-std::cout << root;
-\endcode
-
-\section _pbuild Build instructions
-The build instructions are located in the file
-README.txt in the top directory of the project.
-
-Permanent link to the latest revision of the file in subversion:
-latest README.txt
-
-\section _plinks Project links
-- json-cpp home
-- json-cpp sourceforge project
-
-\section _rlinks Related links
-- JSON Specification and alternate language implementations.
-- YAML A data format designed for human readability.
-- UTF-8 and Unicode FAQ.
-
-\section _license License
-The json-cpp library and this documentation are in Public Domain.
-
-\author Baptiste Lepilleur
-*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000
--- a/tags/jsoncpp/0.5.0/doc/readme.txt
+++ /dev/null
@@ -1 +0,0 @@
-The documentation is generated using doxygen (http://www.doxygen.org).
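The jsoncpp.dox example above emits the configuration with Json::StyledWriter; for compact, machine-oriented output the library also provides Json::FastWriter, declared alongside StyledWriter in writer.h (as the forwards.h diff further below shows). A minimal sketch, assuming the same config_doc string as in the example above and a hypothetical writeCompact() helper:

\code
#include <json/json.h>
#include <string>

// Parse a configuration document and re-emit it as unstyled,
// single-line JSON (comments and formatting are dropped).
std::string writeCompact( const std::string &config_doc )
{
   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( config_doc, root ) )
      return std::string();     // reader.getFormatedErrorMessages() holds the details

   Json::FastWriter writer;
   return writer.write( root ); // e.g. {"encoding":"UTF-8","indent":{...},"plug-ins":[...]}
}
\endcode

FastWriter exposes the same write( root ) interface as StyledWriter, so the two can be swapped depending on whether the output is meant for humans or for another program.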
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000
--- a/tags/jsoncpp/0.5.0/doc/roadmap.dox
+++ /dev/null
@@ -1,32 +0,0 @@
-/*! \page roadmap JsonCpp roadmap
-  \section ms_release Makes JsonCpp ready for release
-  - Build system clean-up:
-    - Fix build on Windows (shared-library build is broken)
-    - Add enable/disable flag for static and shared library build
-    - Enhance help
-  - Platform portability check: (Notes: was ok on last check)
-    - linux/gcc,
-    - solaris/cc,
-    - windows/msvc678,
-    - aix/vacpp
-  - Add JsonCpp version to header as numeric for use in preprocessor test
-  - Remove buggy experimental hash stuff
-  - Release on sourceforge download
-  \section ms_strict Adds a strict mode to reader/parser
-  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
-  - Enforce only object or array as root element
-  - Disable comment support
-  - Get jsonchecker failing tests to pass in strict mode
-  \section ms_separation Expose a json reader/writer API that does not impose using Json::Value.
-  Some typical use-cases involve serializing an application-specific structure to/from a JSON document.
-  - Event-based parser to allow unserializing a JSON document directly into a data structure instead of
-    using the intermediate Json::Value.
-  - "Stream" based parser to serialize a JSON document without using Json::Value as input.
-  - Performance oriented parser/writer:
-    - Provides an event based parser. Should allow pulling & skipping events for ease of use.
-    - Provides a JSON document builder: fast only.
-  \section ms_perfo Performance tuning
-  - Provides support for static property name definition avoiding allocation
-  - Static property dictionary can be provided to JSON reader
-  - Performance scenario & benchmarking
-*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 8856765..0000000
--- a/tags/jsoncpp/0.5.0/doxybuild.py
+++ /dev/null
@@ -1,166 +0,0 @@
-"""Script to generate doxygen documentation.
-"""
-
-import re
-import os
-import os.path
-import sys
-import shutil
-from devtools import tarball
-
-def find_program(*filenames):
-    """Find a program searched for on the system PATH.
-    @param filenames: a list of possible names of the program to search for
-    @return: the full path of the filename if found, or '' if filename could not be found
-"""
-    paths = os.environ.get('PATH', '').split(os.pathsep)
-    suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
-    for filename in filenames:
-        for name in [filename+ext for ext in suffixes.split()] + [filename]:
-            for directory in paths:
-                full_path = os.path.join(directory, name)
-                if os.path.isfile(full_path):
-                    return full_path
-    return ''
-
-def do_subst_in_file(targetfile, sourcefile, dict):
-    """Replace all instances of the keys of dict with their values.
-    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
-    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
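[Editor's illustration, not part of the patch] The Path syntax listed above is easiest to see in a short usage sketch. The header itself flags Path as experimental and untested, so the following only illustrates the documented dotted/bracketed notation, and it assumes the usual <json/json.h> umbrella header from this release.

    #include <iostream>
    #include <json/json.h>

    int main()
    {
       Json::Value root;
       root["settings"]["colors"][0u] = "red";   // ".settings.colors[0]" in Path notation

       // Resolve a node from a path string; resolve() without a default
       // returns a reference to the located value.
       Json::Path path( ".settings.colors[0]" );
       const Json::Value &color = path.resolve( root );
       std::cout << color.asString() << std::endl;   // expected to print "red"
       return 0;
    }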
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
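[Editor's illustration, not part of the patch] To make the "page per collision node" idea described above concrete, here is a hypothetical, much-simplified sketch. The names, the fixed slot count, and the int payload are illustrative only and do not reproduce the real ValueInternalLink / ValueInternalMap layout (slot occupancy tracking is omitted for brevity).

    #include <cstddef>
    #include <string>

    // One collision-chain node ("link") stores several key/value slots at once,
    // instead of one entry per node as a classic chained hash table would.
    struct DemoLink
    {
       enum { itemsPerLink = 6 };
       std::string keys_[itemsPerLink];
       int         items_[itemsPerLink];
       DemoLink   *next_;
    };

    // Probing one bucket: walk the chain of links, then the slots in each link.
    inline int *findInBucket( DemoLink *bucket, const std::string &key )
    {
       for ( DemoLink *link = bucket; link != 0; link = link->next_ )
          for ( int slot = 0; slot < int(DemoLink::itemsPerLink); ++slot )
             if ( link->keys_[slot] == key )
                return &link->items_[slot];
       return 0;   // not found in this bucket
    }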
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index 80a2edb..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,344 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. 
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. [Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store', default=False, - help="""Skips build check.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' 
) - release_version = args[0] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentatio upload' - else: - print 'No upload user specified. Documentation was not upload.' - print 'Tarball can be found at:', doc_tarball_path - #@todo: - #upload source & doc tarballs - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. 
- - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
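// This dummy struct exists purely for its side effect: the file-scope instance
// declared just below runs this constructor during static initialization, which
// forces the function-local statics inside arrayAllocator() to be built before
// main() instead of lazily on first use.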
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
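// Every element was constructed with placement new into a page obtained from
// allocateArrayPage(), so its destructor has to be invoked by hand here; the raw
// pages themselves are handed back to the allocator below rather than deleted.
// (Layout: element i lives at pages_[i / itemsPerPage][i % itemsPerPage]; e.g.
// with 8 items per page, element 13 would sit in pages_[1][5].)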
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
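 * (DefaultValueMapAllocator::allocateMapLink() below relies on exactly this:
 * it zero-fills the freshly allocated block with memset instead of running
 * this constructor.)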
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
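// Worked examples for codePointToUTF8() above (standard UTF-8 encodings, listed as
// an illustrative check):
//
//     codePointToUTF8( 0x41 )    -> "\x41"              ("A", 1 byte)
//     codePointToUTF8( 0xE9 )    -> "\xC3\xA9"          (e-acute, 2 bytes)
//     codePointToUTF8( 0x20AC )  -> "\xE2\x82\xAC"      (euro sign, 3 bytes)
//     codePointToUTF8( 0x1D11E ) -> "\xF0\x9D\x84\x9E"  (musical G clef, 4 bytes)
//
// The last value is what decodeUnicodeCodePoint() later produces from the surrogate
// pair \uD834\uDD1E: 0x10000 + ((0xD834 & 0x3FF) << 10) + (0xDD1E & 0x3FF) == 0x1D11E.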
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
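// A practical consequence of the allocator above: member names and string values
// stored through makeMemberName() / duplicateStringValue() are heap copies owned by
// the Value.  The exception is Json::StaticString, whose constructor overload further
// down stores the caller's pointer without copying, e.g.:
//
//     static const char text[] = "a fairly long string literal";
//     Json::Value v( Json::StaticString( text ) );   // pointer stored, no copy;
//                                                    // 'text' must outlive v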
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
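// --- Illustrative sketch (not from the original json_value.cpp) -------------
// The Path / PathArgument code above implements a small path syntax:
// '.' separates object member names, "[3]" indexes into arrays, and '%'
// (or "[%]") is a placeholder bound to the extra PathArgument parameters
// a1..a5. A minimal, hypothetical usage sketch, assuming only the Json::Path
// API shown in this file:
//
//   #include <json/json.h>
//
//   Json::Value root;
//   Json::Path path( ".settings.servers[0].port" );
//   path.make( root ) = 8080;                        // creates intermediate nodes
//   const Json::Value &port = path.resolve( root );  // walks the same path back
// -----------------------------------------------------------------------------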
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
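// --- Illustrative sketch (not from the original json_writer.cpp) -------------
// StyledStreamWriter::write() above drives the pretty-printer against any
// std::ostream, so the same code serves files, string streams and stdout.
// A minimal, hypothetical usage sketch, assuming only the API shown in this
// file (StyledStreamWriter and the operator<< defined further below):
//
//   #include <json/json.h>
//   #include <iostream>
//   #include <sstream>
//
//   Json::Value root;
//   root["name"] = "jsoncpp";
//   root["tags"].append( 0.5 );
//
//   Json::StyledStreamWriter writer( "  " );   // two-space indentation
//   writer.write( std::cout, root );
//
//   std::ostringstream oss;                    // or capture the styled output
//   oss << root;                               // via operator<< (see below)
// ------------------------------------------------------------------------------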
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investigate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases will be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the names of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include <json/config.h> -# include <stdio.h> -# include <deque> -# include <string> - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion fails. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targeted at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque<Failure> Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs tests as specified on the command-line. - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases. - /// If --test is provided, then run the test with the given name. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test cases in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template <typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include <json/json.h> -#include "jsontest.h" - - -// TODO: -// - boolean values report that they are integral. Should not be. -// - unsigned integers in the signed integer range are not considered to be valid integers. Should check range.
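The fixture macros above expand into a small TestCase subclass plus a static factory function, and registering a fixture simply stores that factory pointer in a Runner. A minimal usage sketch follows; the fixture name DummyTest, its member value_, and the test name "answer" are illustrative only and do not come from the original sources:

    #include "jsontest.h"

    // Hypothetical fixture: state shared by its test cases lives here.
    struct DummyTest : JsonTest::TestCase
    {
       int value_;
       DummyTest() : value_( 42 ) {}
    };

    // JSONTEST_FIXTURE generates class TestDummyTestanswer (with factory()
    // and testName() returning "DummyTest/answer") and leaves runTestCase()
    // to be defined by the body below.
    JSONTEST_FIXTURE( DummyTest, answer )
    {
       // Non-aborting: on mismatch a Failure is recorded, the streamed text
       // is attached to it, and the test case keeps running.
       JSONTEST_ASSERT_EQUAL( 42, value_ ) << "value_=" << value_;
    }

    int main( int argc, const char *argv[] )
    {
       JsonTest::Runner runner;
       JSONTEST_REGISTER_FIXTURE( runner, DummyTest, answer );
       // Handles --list-tests, --test NAME and --test-auto as printUsage() describes.
       return runner.runCommandLine( argc, argv );
    }
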
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 7acca8f8d2505868e7535ebe6e27c80bb701b668 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:41:18 +0000 Subject: [PATCH 124/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@124 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 168 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 344 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14498 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. 
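To make the flattened format described in the previous section concrete, here is a hypothetical test case (file name and content invented for illustration; see test_complex_01.expected for the authoritative rendering). A test/data/test_example.json containing:

    {
        "id" : 1,
        "name" : "example",
        "tags" : [ "a", "b" ]
    }

would be checked against a test_example.expected along these lines:

    .={}
    .id=1
    .name="example"
    .tags=[]
    .tags[0]="a"
    .tags[1]="b"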
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
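In the SConstruct above, the 'check' alias ultimately just imports the test scripts from the test/ directory and hands them the path of the freshly built executable: runJSONTests_action calls runjsontests.runAllTests with the binary and the test/data directory, and runUnitTests_action calls rununittests.runAllTests with the unit-test binary. Outside of scons the same steps can be run by hand; a sketch with illustrative binary paths:

    import os
    import sys

    jsontest_path = os.path.abspath( 'test' )
    sys.path.insert( 0, jsontest_path )
    import runjsontests
    import rununittests

    # Reader/writer tests against the flattened .expected files.
    json_status = runjsontests.runAllTests( os.path.abspath( 'bin/linux-gcc-4.1/jsontestrunner' ),
                                            os.path.join( jsontest_path, 'data' ) )
    # Json::Value unit tests.
    unit_status = rununittests.runAllTests( os.path.abspath( 'bin/linux-gcc-4.1/test_lib_json' ) )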
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
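The make_tarball helper above gathers a list of sources into a gzip-compressed tarball, strips base_dir from the stored paths and re-roots everything under prefix_dir, while decompress unpacks such an archive into a target directory. A usage sketch (archive name and paths are only illustrative, patterned after the jsoncpp-src-%JSONCPP_VERSION%.tar.gz target defined in the SConstruct):

    from devtools import tarball

    # Pack selected sources; inside the archive they are stored under jsoncpp-src-0.5.0/...
    tarball.make_tarball( 'dist/jsoncpp-src-0.5.0.tar.gz',
                          sources = ['README.txt', 'include', 'src'],
                          base_dir = '.',
                          prefix_dir = 'jsoncpp-src-0.5.0' )

    # Unpack it again, e.g. to inspect the content of a release candidate.
    tarball.decompress( 'dist/jsoncpp-src-0.5.0.tar.gz', 'dist/unpacked' )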
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite a JSON document, preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report the failure and its location in the document to the user.
+    std::cout << "Failed to parse configuration\n"
+              << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, build the new configuration document:
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in Subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML, a data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in the Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
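The usage snippet in jsoncpp.dox above is a fragment. For reference, here is a minimal, self-contained sketch of the same read/query/modify/write cycle, assuming the Json::Reader and Json::StyledWriter API shipped in this release; the file name "config.json" and the keys used are illustrative only, not part of the library or this patch.

\code
#include <json/json.h>
#include <fstream>
#include <iostream>
#include <iterator>
#include <string>

int main()
{
   // Load the whole configuration file into a string ("config.json" is just an example name).
   std::ifstream file( "config.json" );
   std::string config_doc( (std::istreambuf_iterator<char>( file )),
                           std::istreambuf_iterator<char>() );

   Json::Value root;     // will contain the root value after parsing
   Json::Reader reader;
   if ( !reader.parse( config_doc, root ) )
   {
      std::cerr << "Failed to parse configuration\n"
                << reader.getFormatedErrorMessages();
      return 1;
   }

   // Query a member with a default value, then modify the document.
   std::string encoding = root.get( "encoding", "UTF-8" ).asString();
   root["indent"]["length"] = 4;

   // Serialize the document back to text.
   Json::StyledWriter writer;
   std::cout << "encoding: " << encoding << "\n"
             << writer.write( root );
   return 0;
}
\endcode

Json::Reader collects comments by default, which is how the rewritten document can keep the comments present in the input, as described in the Features section of jsoncpp.dox.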
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library build
+    - Enhance help
+  - Platform portability check: (Note: was OK on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as a numeric constant for use in preprocessor tests
+  - Remove buggy experimental hash stuff
+  - Release on the SourceForge download page
+  \section ms_strict Add a strict mode to the reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get failing jsonchecker tests to pass in strict mode
+  \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value.
+  Some typical use cases involve converting an application-specific structure to/from a JSON document.
+  - Event-based parser to allow deserializing a JSON document directly into an application data structure
+    instead of going through the intermediate Json::Value.
+  - "Stream"-based writer to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provide an event-based parser. Should allow pulling & skipping events for ease of use.
+    - Provide a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property-name definitions to avoid allocation
+  - A static property dictionary can be provided to the JSON reader
+  - Performance scenarios & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..0ac036f
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,168 @@
+"""Script to generate doxygen documentation.
+"""
+
+import re
+import os
+import os.path
+import sys
+import shutil
+from devtools import tarball
+
+def find_program(*filenames):
+    """Find a program on the system PATH.
+    @param filenames: a list of possible names of the program to search for
+    @return: the full path of the program if found, or '' if it could not be found
+"""
+    paths = os.environ.get('PATH', '').split(os.pathsep)
+    suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
+    for filename in filenames:
+        for name in [filename+ext for ext in suffixes.split()]:
+            for directory in paths:
+                full_path = os.path.join(directory, name)
+                if os.path.isfile(full_path):
+                    return full_path
+    return ''
+
+def do_subst_in_file(targetfile, sourcefile, dict):
+    """Replace all instances of the keys of dict with their values.
+    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
+    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + full_output_dir = os.path.join( 'doc', output_dir ) + if os.path.isdir( full_output_dir ): + print 'Deleting directory:', full_output_dir + shutil.rmtree( full_output_dir ) + if not os.path.isdir( full_output_dir ): + os.makedirs( full_output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + full_output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. 
+ + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. 
+//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. +# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. 
+ bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include "value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. 
+ /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. 
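[Editorial note: the following is an illustrative sketch of the Reader and Features API declared above, not part of the original patch. It assumes the headers are reachable as <json/json.h> and shows strict-mode parsing with error reporting via getFormatedErrorMessages().]
\code
#include <json/json.h>
#include <iostream>
#include <string>

bool parseStrict( const std::string &document, Json::Value &root )
{
   // strictMode(): comments forbidden, root must be an array or an object.
   Json::Reader reader( Json::Features::strictMode() );
   if ( !reader.parse( document, root, false ) )
   {
      // Formatted list of parse errors with their location in the document.
      std::cerr << reader.getFormatedErrorMessages() << std::endl;
      return false;
   }
   return true;
}
\endcode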
+ \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
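[Editorial note: a hedged sketch of how the experimental Path class above might be used, derived only from the syntax documented in its comment; the class is marked experimental and untested, so treat this as illustrative, not definitive.]
\code
Json::Value root;
root["settings"]["windows"][0u]["width"] = 640;

Json::Path widthPath( ".settings.windows[0].width" );
int width = widthPath.resolve( root ).asInt();                  // 640

// "[%]" takes the index from a PathArgument supplied to the constructor.
Json::Path byIndex( ".settings.windows[%].width", Json::PathArgument( 0u ) );
byIndex.make( root ) = 800;                                     // creates the node if needed, then assigns
\endcode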
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
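[Editorial note: the paged look-up scheme described in the ValueInternalArray comment reduces to the arithmetic below; this is an illustrative sketch of the documented scheme, not the actual implementation.]
\code
// pages is an array of pointers, each pointing to a fixed-size page of Value items.
Json::Value &pagedLookup( Json::Value **pages, unsigned itemIndex )
{
   const unsigned itemsPerPage = 8;                        // ValueInternalArray::itemsPerPage
   const unsigned pageIndex    = itemIndex / itemsPerPage; // which page holds the item
   const unsigned indexInPage  = itemIndex % itemsPerPage; // offset within that page
   return pages[pageIndex][indexInPage];
}
\endcode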
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..80a2edb --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,344 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store', default=False, + help="""Skips build check.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' 
) + release_version = args[0] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentatio upload' + else: + print 'No upload user specified. Documentation was not upload.' + print 'Tarball can be found at:', doc_tarball_path + #@todo: + #upload source & doc tarballs + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. 
+ + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
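
An illustrative aside (not part of the imported sources): the Glob() helper above keeps a file when its basename matches at least one include pattern and no exclude pattern. A self-contained sketch of that filter outside of SCons (helper name hypothetical):

import fnmatch
import os

def list_matching( dir_path, includes=('*',), excludes=() ):
    """Return file names in dir_path matching an include pattern and no exclude pattern."""
    names = []
    for name in os.listdir( dir_path ):
        if not os.path.isfile( os.path.join( dir_path, name ) ):
            continue
        if not any( fnmatch.fnmatchcase( name, pattern ) for pattern in includes ):
            continue
        if any( fnmatch.fnmatchcase( name, pattern ) for pattern in excludes ):
            continue
        names.append( name )
    return names

# e.g. list_matching( '.', includes=('*.cpp', '*.h'), excludes=('~*.cpp',) )
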
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
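
An illustrative aside (not part of the imported sources): do_subst_in_file() above treats each SUBST_DICT key as a regular expression and rewrites the file contents with re.sub. A minimal sketch of that substitution step on a string instead of a file:

import re

def subst_text( contents, subst_dict ):
    """Replace every occurrence of each key (a regular expression) with its value."""
    for key, value in subst_dict.items():
        contents = re.sub( key, value, contents )
    return contents

print subst_text( 'version=%VERSION% base=%BASE%',
                  {'%VERSION%': '0.5.0', '%BASE%': 'jsoncpp'} )
# prints: version=0.5.0 base=jsoncpp
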
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
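
An illustrative aside (not part of the imported sources): the branch above reuses released storage as an intrusive free list, i.e. the first bytes of a freed object's slot hold the pointer to the next free slot, so no extra bookkeeping memory is needed (this is why the allocator asserts that an object is at least as large as a pointer). A self-contained sketch of that trick:

#include <cassert>
#include <cstdio>

struct Slot { char storage[32]; };   // stand-in for an object; large enough to hold a pointer

static Slot *freeHead = 0;

static void releaseSlot( Slot *slot )
{
   *(Slot **)slot = freeHead;   // previous head is stored inside the freed storage
   freeHead = slot;
}

static Slot *acquireSlot()
{
   assert( freeHead != 0 );
   Slot *slot = freeHead;
   freeHead = *(Slot **)slot;   // pop the list
   return slot;
}

int main()
{
   Slot a, b;
   releaseSlot( &a );
   releaseSlot( &b );
   std::printf( "%d %d\n", acquireSlot() == &b, acquireSlot() == &a );   // prints: 1 1
   return 0;
}
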
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
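
An illustrative aside (not part of the imported sources): ValueInternalArray stores its elements in fixed-size pages, so an index splits into a page number and an offset; growing the array only reallocates the small table of page pointers, never the elements themselves. A minimal sketch of the addressing (the page size used here is hypothetical; the real constant is ValueInternalArray::itemsPerPage):

#include <cstdio>

int main()
{
   const unsigned itemsPerPage = 8;   // hypothetical page size for the example
   const unsigned indexes[] = { 0, 7, 8, 21 };
   for ( int i = 0; i < 4; ++i )
      std::printf( "index %u -> page %u, offset %u\n",
                   indexes[i], indexes[i] / itemsPerPage, indexes[i] % itemsPerPage );
   return 0;
}
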
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
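
An illustrative aside (not part of the imported sources): find() and resolveReference() below both start from the same scheme sketched in the comment above: a simple multiplicative string hash selects a bucket, and that bucket's chain of links is scanned linearly for the key. A self-contained sketch of the hashing and bucket selection (the bucket count here is hypothetical; the real map sizes its own buckets):

#include <cstdio>

static unsigned int hashKey( const char *key )
{
   unsigned int hash = 0;
   while ( *key )
      hash += *key++ * 37;   // same accumulation as ValueInternalMap::hash()
   return hash;
}

int main()
{
   const unsigned int bucketCount = 4;   // hypothetical
   const char *keys[] = { "name", "value", "id" };
   for ( int i = 0; i < 3; ++i )
      std::printf( "%-5s -> hash %u, bucket %u\n",
                   keys[i], hashKey( keys[i] ), hashKey( keys[i] ) % bucketCount );
   return 0;
}
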
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
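
An illustrative aside (not part of the imported sources): codePointToUTF8() above splits a code point's bits across one to four bytes with the standard UTF-8 lead and continuation markers. A worked example for U+20AC (the euro sign), which falls in the three-byte range and encodes to 0xE2 0x82 0xAC:

#include <cstdio>
#include <string>

// Three-byte branch only, mirroring codePointToUTF8() for code points 0x800..0xFFFF.
static std::string encode3( unsigned int cp )
{
   std::string result;
   result.resize( 3 );
   result[2] = static_cast<char>( 0x80 | (0x3f & cp) );
   result[1] = static_cast<char>( 0x80 | (0x3f & (cp >> 6)) );
   result[0] = static_cast<char>( 0xE0 | (0xf & (cp >> 12)) );
   return result;
}

int main()
{
   std::string utf8 = encode3( 0x20AC );
   for ( std::string::size_type i = 0; i < utf8.size(); ++i )
      std::printf( "0x%02X ", static_cast<unsigned char>( utf8[i] ) );
   std::printf( "\n" );   // prints: 0xE2 0x82 0xAC
   return 0;
}
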
+Reader::Reader()
+   : features_( Features::all() )
+{
+}
+
+
+Reader::Reader( const Features &features )
+   : features_( features )
+{
+}
+
+
+bool
+Reader::parse( const std::string &document,
+               Value &root,
+               bool collectComments )
+{
+   document_ = document;
+   const char *begin = document_.c_str();
+   const char *end = begin + document_.length();
+   return parse( begin, end, root, collectComments );
+}
+
+
+bool
+Reader::parse( std::istream& sin,
+               Value &root,
+               bool collectComments )
+{
+   //std::istream_iterator<char> begin(sin);
+   //std::istream_iterator<char> end;
+   // Those would allow streamed input from a file, if parse() were a
+   // template function.
+
+   // Since std::string is reference-counted, this at least does not
+   // create an extra copy.
+   std::string doc;
+   std::getline(sin, doc, (char)EOF);
+   return parse( doc, root, collectComments );
+}
+
+bool
+Reader::parse( const char *beginDoc, const char *endDoc,
+               Value &root,
+               bool collectComments )
+{
+   if ( !features_.allowComments_ )
+   {
+      collectComments = false;
+   }
+
+   begin_ = beginDoc;
+   end_ = endDoc;
+   collectComments_ = collectComments;
+   current_ = begin_;
+   lastValueEnd_ = 0;
+   lastValue_ = 0;
+   commentsBefore_ = "";
+   errors_.clear();
+   while ( !nodes_.empty() )
+      nodes_.pop();
+   nodes_.push( &root );
+
+   bool successful = readValue();
+   Token token;
+   skipCommentTokens( token );
+   if ( collectComments_ && !commentsBefore_.empty() )
+      root.setComment( commentsBefore_, commentAfter );
+   if ( features_.strictRoot_ )
+   {
+      if ( !root.isArray() && !root.isObject() )
+      {
+         // Set error location to start of doc, ideally should be first token found in doc
+         token.type_ = tokenError;
+         token.start_ = beginDoc;
+         token.end_ = endDoc;
+         addError( "A valid JSON document must be either an array or an object value.",
+                   token );
+         return false;
+      }
+   }
+   return successful;
+}
+
+
+bool
+Reader::readValue()
+{
+   Token token;
+   skipCommentTokens( token );
+   bool successful = true;
+
+   if ( collectComments_ && !commentsBefore_.empty() )
+   {
+      currentValue().setComment( commentsBefore_, commentBefore );
+      commentsBefore_ = "";
+   }
+
+
+   switch ( token.type_ )
+   {
+   case tokenObjectBegin:
+      successful = readObject( token );
+      break;
+   case tokenArrayBegin:
+      successful = readArray( token );
+      break;
+   case tokenNumber:
+      successful = decodeNumber( token );
+      break;
+   case tokenString:
+      successful = decodeString( token );
+      break;
+   case tokenTrue:
+      currentValue() = true;
+      break;
+   case tokenFalse:
+      currentValue() = false;
+      break;
+   case tokenNull:
+      currentValue() = Value();
+      break;
+   default:
+      return addError( "Syntax error: value, object or array expected.", token );
+   }
+
+   if ( collectComments_ )
+   {
+      lastValueEnd_ = current_;
+      lastValue_ = &currentValue();
+   }
+
+   return successful;
+}
+
+
+void
+Reader::skipCommentTokens( Token &token )
+{
+   if ( features_.allowComments_ )
+   {
+      do
+      {
+         readToken( token );
+      }
+      while ( token.type_ == tokenComment );
+   }
+   else
+   {
+      readToken( token );
+   }
+}
+
+
+bool
+Reader::expectToken( TokenType type, Token &token, const char *message )
+{
+   readToken( token );
+   if ( token.type_ != type )
+      return addError( message, token );
+   return true;
+}
+
+
+bool
+Reader::readToken( Token &token )
+{
+   skipSpaces();
+   token.start_ = current_;
+   Char c = getNextChar();
+   bool ok = true;
+   switch ( c )
+   {
+   case '{':
+      token.type_ = tokenObjectBegin;
+      break;
+   case '}':
+      token.type_ = tokenObjectEnd;
+      break;
+   case '[':
+      token.type_ = tokenArrayBegin;
+      break;
+   case ']':
+      token.type_ = tokenArrayEnd;
+
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+            // blep notes: actually escaping \/ may be useful in javascript to avoid </
+            // sequence.
+         default:
+            if ( isControlCharacter( *c ) )
+            {
+               std::ostringstream oss;
+               oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c);
+               result += oss.str();
+            }
+            else
+            {
+               result += *c;
+            }
+            break;
+      }
+   }
+   result += "\"";
+   return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer()
+{
+}
+
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+   : yamlCompatiblityEnabled_( false )
+{
+}
+
+
+void
+FastWriter::enableYAMLCompatibility()
+{
+   yamlCompatiblityEnabled_ = true;
+}
+
+
+std::string
+FastWriter::write( const Value &root )
+{
+   document_ = "";
+   writeValue( root );
+   document_ += "\n";
+   return document_;
+}
+
+
+void
+FastWriter::writeValue( const Value &value )
+{
+   switch ( value.type() )
+   {
+   case nullValue:
+      document_ += "null";
+      break;
+   case intValue:
+      document_ += valueToString( value.asInt() );
+      break;
+   case uintValue:
+      document_ += valueToString( value.asUInt() );
+      break;
+   case realValue:
+      document_ += valueToString( value.asDouble() );
+      break;
+   case stringValue:
+      document_ += valueToQuotedString( value.asCString() );
+      break;
+   case booleanValue:
+      document_ += valueToString( value.asBool() );
+      break;
+   case arrayValue:
+      {
+         document_ += "[";
+         int size = value.size();
+         for ( int index =0; index < size; ++index )
+         {
+            if ( index > 0 )
+               document_ += ",";
+            writeValue( value[index] );
+         }
+         document_ += "]";
+      }
+      break;
+   case objectValue:
+      {
+         Value::Members members( value.getMemberNames() );
+         document_ += "{";
+         for ( Value::Members::iterator it = members.begin();
+               it != members.end();
+               ++it )
+         {
+            const std::string &name = *it;
+            if ( it != members.begin() )
+               document_ += ",";
+            document_ += valueToQuotedString( name.c_str() );
+            document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
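
For illustration only (not part of the patch): a minimal sketch of how the writer classes implemented in this file are typically driven; the field names are made up for the example.

    #include <json/json.h>
    #include <iostream>

    int main()
    {
       Json::Value root;
       root["encoding"] = "UTF-8";               // hypothetical content
       root["indent"]["length"] = 3;

       Json::FastWriter fast;                    // compact, single line
       std::string compact = fast.write( root );

       Json::StyledWriter styled;                // human readable, keeps comments
       std::string pretty = styled.write( root );

       Json::StyledStreamWriter stream( "   " ); // indentation string is explicit
       stream.write( std::cout, root );
       return 0;
    }
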
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
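
For illustration only (not part of the patch): the operator<< overload at the end of json_writer.cpp above lets a Value be streamed directly; the file name below is hypothetical.

    #include <json/json.h>
    #include <fstream>

    void save( const Json::Value &root )
    {
       std::ofstream out( "settings.json" );  // hypothetical path
       out << root;                           // delegates to Json::StyledStreamWriter
    }
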
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
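
For illustration only (not part of the patch): a sketch of how the JSONTEST_* macros declared in jsontest.h above are meant to be used; the StringTest fixture and its assertions are hypothetical.

    struct StringTest : JsonTest::TestCase   // hypothetical fixture
    {
    };

    JSONTEST_FIXTURE( StringTest, roundTrip )
    {
       Json::Value v( "abc" );
       JSONTEST_ASSERT_EQUAL( std::string("abc"), v.asString() );
       JSONTEST_ASSERT_EQUAL( 0u, v.size() );   // scalar values report size 0
    }

    // registration, typically in main():
    //    JsonTest::Runner runner;
    //    JSONTEST_REGISTER_FIXTURE( runner, StringTest, roundTrip );
    //    runner.runCommandLine( argc, argv );
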
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
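
The test/data files added below pair each *.json input with a *.expected dump of the parsed value tree, one "path=value" line per node: "." is the root, ".name" a member, "[i]" an array element. For illustration only, a hypothetical input and the dump it would produce in that format:

    // input (hypothetical)
    { "count" : 2, "items" : [ "a", "b" ] }

    // expected dump
    .={}
    .count=2
    .items=[]
    .items[0]="a"
    .items[1]="b"
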
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 57f9597d4190ad774d598d8f5346754fe08a4c7c Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:45:38 +0000 Subject: [PATCH 125/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@125 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 168 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 344 --- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 
5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - 
.../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14498 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
-It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. 
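The flattened representation described above under "Adding a reader/writer test" maps each element of the parsed tree to one "path=value" line. A minimal Python sketch of that flattening, loosely following the valueTreeToString helper in test/pyjsontestrunner.py shown earlier in this patch (the flatten name is illustrative and the scalar formatting is simplified; the real runners quote strings and print reals with %.16g):

def flatten(value, path='.'):
    """Return the flattened 'path=value' lines for a parsed JSON document."""
    lines = []
    if isinstance(value, dict):
        lines.append('%s={}' % path)
        # The root path '.' is not followed by an extra separator.
        suffix = '' if path.endswith('.') else '.'
        for name in sorted(value.keys()):
            lines.extend(flatten(value[name], path + suffix + name))
    elif isinstance(value, list):
        lines.append('%s=[]' % path)
        for index, child in enumerate(value):
            lines.extend(flatten(child, '%s[%d]' % (path, index)))
    else:
        # Simplified scalar output; good enough to show the path layout.
        lines.append('%s=%r' % (path, value))
    return lines

# For example, flatten({"count": [1, 2]}) yields:
#   .={}
#   .count=[]
#   .count[0]=1
#   .count[1]=2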
-Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. 
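As a quick worked example of the autolink naming convention the SConstruct above encodes in LIB_NAME_SUFFIX, the msvc71 static build resolves to the values sketched below; other platform/runtime combinations follow the same pattern. This is only an illustration of the substitution, not part of the build script.

# Values the SConstruct above sets for an 'msvc71' static build:
lib_platform = 'vc71'    # 'msvc71' with the leading 'ms' stripped
lib_link_type = 'lib'    # static library
lib_cruntime = 'mt'      # multi-threaded C runtime (/MT)
lib_name_suffix = '%s_%s%s' % (lib_platform, lib_link_type, lib_cruntime)
# The json library target therefore becomes:
print 'json_' + lib_name_suffix    # -> json_vc71_libmt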
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
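A minimal usage sketch for the antglob module above, assuming the devtools package is importable from the current directory; the directory and patterns here are hypothetical but follow the Ant-style convention documented in ant_pattern_to_re, and the commented-out helpers in fixeol.py call glob() the same way.

# Illustration only: collect C++ sources with Ant-style patterns, relying on
# the default excludes (VCS metadata, editor backups, ...) defined above.
from devtools import antglob
cpp_sources = antglob.glob('src', includes='**/*.cpp **/*.h **/*.inl')
for path in cpp_sources:
    print path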
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
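For the tarball helpers above, a hypothetical call matching the source-distribution target the SConstruct builds (jsoncpp-src-<version>.tar.gz) would look like the sketch below; the concrete paths and version string are illustrative only.

# Illustration only (not part of the patch):
from devtools import tarball
# Pack selected sources under a jsoncpp-src-0.5.0/ prefix inside the archive.
tarball.make_tarball('dist/jsoncpp-src-0.5.0.tar.gz',
                     sources=['src', 'include', 'SConstruct'],
                     base_dir='.',
                     prefix_dir='jsoncpp-src-0.5.0')
# Unpack it again into a scratch directory.
tarball.decompress('dist/jsoncpp-src-0.5.0.tar.gz', 'unpacked')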
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
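(Editor's note, not part of the patch: the doxyfile deleted above controls what appears in the generated API documentation through its PREDEFINED list — _MSC_VER, _CPPRTTI, _WIN32, JSONCPP_DOC_EXCLUDE_IMPLEMENTATION, JSON_VALUE_USE_INTERNAL_MAP — together with MACRO_EXPANSION = YES. Below is a minimal sketch of the guard pattern those predefinitions target; the namespace and type names are illustrative only and do not come from the library or from this patch.)

\code
// Illustrative only: why the doxyfile predefines these macros.
// JSONCPP_DOC_EXCLUDE_IMPLEMENTATION is defined only while Doxygen runs,
// so blocks guarded with #ifndef are dropped from the generated docs but
// are still compiled normally.
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
namespace example {
   struct HiddenImplementationDetail   // omitted from the API documentation
   {
      int detail_;
   };
}
#endif

// JSON_VALUE_USE_INTERNAL_MAP is predefined for Doxygen so the experimental
// internal-map classes get documented even though a default build leaves the
// macro undefined.
#ifdef JSON_VALUE_USE_INTERNAL_MAP
namespace example {
   struct DocumentedExperimentalFeature {};
}
#endif

int main() { return 0; }   // compiles with or without the macros defined
\endcode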
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
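(Editor's note, not part of the patch: the \code sample in the deleted jsoncpp.dox above is only a fragment. The sketch below shows the same read/modify/write cycle as a self-contained program, using only calls documented in that file — Json::Reader::parse, getFormatedErrorMessages, Value::get, operator[], and StyledWriter::write. The inline config_doc string, the main() scaffolding, and the assumption that the project's include directory is on the compiler's include path as <json/json.h> are added for illustration.)

\code
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Same configuration document used as the example in jsoncpp.dox.
   const std::string config_doc =
      "{ \"encoding\" : \"UTF-8\","
      "  \"plug-ins\" : [ \"python\", \"c++\", \"ruby\" ],"
      "  \"indent\" : { \"length\" : 3, \"use_space\" : true } }";

   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( config_doc, root ) )
   {
      // Report the failure and the error locations in the document.
      std::cout << "Failed to parse configuration\n"
                << reader.getFormatedErrorMessages();
      return 1;
   }

   // Read values, falling back to defaults when a member is missing.
   std::string encoding = root.get( "encoding", "UTF-8" ).asString();
   int indentLength = root["indent"].get( "length", 3 ).asInt();

   // Modify the document and write it back out with StyledWriter.
   root["indent"]["use_space"] = false;
   Json::StyledWriter writer;
   std::cout << encoding << " / indent " << indentLength << "\n"
             << writer.write( root );
   return 0;
}
\endcode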
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 0ac036f..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,168 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - full_output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. 
- - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. 
-//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. -# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. 
- bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" -# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. 
- */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. - */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. 
- \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. - */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
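// A hedged, self-contained illustration of the page-index arithmetic described in
// the comment above (plain C++, not the jsoncpp internals): items live in fixed-size
// pages and the page table is a flat array of pointers, so a look-up is one divide
// and one modulo away from an ordinary array access.
#include <cassert>

enum { pageSize = 8 };   // a power of two keeps the divide and modulo cheap

static int &itemAt( int **pages, unsigned itemIndex )
{
   unsigned pageIndex   = itemIndex / pageSize;   // which page holds the item
   unsigned indexInPage = itemIndex % pageSize;   // offset inside that page
   return pages[pageIndex][indexInPage];
}

int main()
{
   int page0[pageSize] = { 0, 1, 2, 3, 4, 5, 6, 7 };
   int page1[pageSize] = { 8, 9, 10, 11, 12, 13, 14, 15 };
   int *pages[2] = { page0, page1 };
   assert( itemAt( pages, 11 ) == 11 );   // page 1, slot 3
   return 0;
}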
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
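// A hedged usage sketch for the two styled writers declared in this header,
// assuming only the public API of the headers in this tree (json/json.h); it
// shows the string-returning StyledWriter next to the stream-based
// StyledStreamWriter.
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   Json::Value root;
   root["name"] = "jsoncpp";
   root["version"] = "0.5.0";
   root["tags"].append( "json" );
   root["tags"].append( "c++" );

   // StyledWriter builds the whole document in a std::string.
   Json::StyledWriter writer;
   std::string document = writer.write( root );
   std::cout << document;

   // StyledStreamWriter writes directly to a stream, here indenting with 3 spaces.
   Json::StyledStreamWriter streamWriter( "   " );
   streamWriter.write( std::cout, root );
   return 0;
}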
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32
-		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32
-		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32
-		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32
-	EndGlobalSection
-	GlobalSection(ExtensibilityGlobals) = postSolution
-	EndGlobalSection
-	GlobalSection(ExtensibilityAddIns) = postSolution
-	EndGlobalSection
-EndGlobal
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj
deleted file mode 100644
index 99a4dd6..0000000
--- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj
+++ /dev/null
@@ -1,119 +0,0 @@
[119 deleted lines of Visual Studio project XML; the markup was lost in this text export]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj
deleted file mode 100644
index 2d7bf99..0000000
--- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj
+++ /dev/null
@@ -1,214 +0,0 @@
[214 deleted lines of Visual Studio project XML; the markup was lost in this text export]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj
deleted file mode 100644
index df36700..0000000
--- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj
+++ /dev/null
@@ -1,130 +0,0 @@
[130 deleted lines of Visual Studio project XML; the markup was lost in this text export]
diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py
deleted file mode 100644
index 80a2edb..0000000
--- a/tags/jsoncpp/0.5.0/makerelease.py
+++ /dev/null
@@ -1,344 +0,0 @@
-"""Tag the sandbox for release, make source and doc tarballs.
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. [Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store', default=False, - help="""Skips build check.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' 
) - release_version = args[0] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentatio upload' - else: - print 'No upload user specified. Documentation was not upload.' - print 'Tarball can be found at:', doc_tarball_path - #@todo: - #upload source & doc tarballs - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. 
- - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
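ValueInternalArray reaches an element through two-level addressing: the page is index / itemsPerPage and the slot within the page is index % itemsPerPage, with the page index grown on demand. A self-contained sketch of that addressing scheme follows; PagedInts and its members are illustrative names, not jsoncpp types.

// Two-level paged storage: a vector of fixed-size pages plus div/mod addressing.
// Illustrative sketch only.
#include <cassert>
#include <vector>

static const unsigned itemsPerPage = 8;      // jsoncpp likewise uses a compile-time page size

struct PagedInts
{
   std::vector<int *> pages;
   unsigned size;

   PagedInts() : size( 0 ) {}
   ~PagedInts()
   {
      for ( unsigned i = 0; i < pages.size(); ++i )
         delete[] pages[i];
   }

   int &at( unsigned index )
   {
      while ( index / itemsPerPage >= pages.size() )   // grow the page index on demand
         pages.push_back( new int[itemsPerPage]() );
      if ( index >= size )
         size = index + 1;
      return pages[index / itemsPerPage][index % itemsPerPage];
   }
};

int main()
{
   PagedInts a;
   a.at( 20 ) = 42;                 // lazily allocates pages 0..2
   assert( a.pages.size() == 3 );
   assert( a.at( 20 ) == 42 );
   return 0;
}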
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
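The key hashing above is a plain multiply-and-accumulate over the bytes of the member name, reduced modulo the bucket count to select a bucket. A minimal sketch of the same computation; hashKey and bucketCount are illustrative names, while the constant 37 matches the code above.

// Standalone sketch of the multiplicative string hash and bucket selection; not jsoncpp code.
#include <cassert>

typedef unsigned int HashKey;

static HashKey hashKey( const char *key )
{
   HashKey hash = 0;
   while ( *key )
      hash += static_cast<unsigned char>( *key++ ) * 37;
   return hash;
}

int main()
{
   const unsigned bucketCount = 16;
   HashKey h = hashKey( "name" );
   unsigned bucket = h % bucketCount;     // bucket index used to start the linked-list walk
   assert( bucket < bucketCount );
   assert( hashKey( "name" ) == h );      // deterministic for equal keys
   return 0;
}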
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
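codePointToUTF8 above lays out 1 to 4 bytes following the UTF-8 bit patterns. The sketch below re-derives only the two-byte case (110xxxxx 10xxxxxx) and checks it against a known code point; encode2Byte is an illustrative helper, not the jsoncpp function.

// Standalone check of the two-byte UTF-8 layout used by codePointToUTF8.
#include <cassert>
#include <string>

static std::string encode2Byte( unsigned int cp )
{
   std::string result( 2, '\0' );
   result[0] = static_cast<char>( 0xC0 | ( 0x1f & ( cp >> 6 ) ) );
   result[1] = static_cast<char>( 0x80 | ( 0x3f & cp ) );
   return result;
}

int main()
{
   // U+00E9 (LATIN SMALL LETTER E WITH ACUTE) encodes as 0xC3 0xA9 in UTF-8.
   std::string utf8 = encode2Byte( 0xE9 );
   assert( static_cast<unsigned char>( utf8[0] ) == 0xC3 );
   assert( static_cast<unsigned char>( utf8[1] ) == 0xA9 );
   return 0;
}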
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
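decodeUnicodeCodePoint combines a high and a low surrogate from two \u escapes as 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF). A quick standalone check of that formula; combineSurrogates is an illustrative helper, not part of jsoncpp.

// Verifies the surrogate-pair combination used by decodeUnicodeCodePoint.
#include <cassert>

static unsigned int combineSurrogates( unsigned int hi, unsigned int lo )
{
   return 0x10000 + ( ( hi & 0x3FF ) << 10 ) + ( lo & 0x3FF );
}

int main()
{
   // "\uD834\uDD1E" in a JSON string denotes U+1D11E (MUSICAL SYMBOL G CLEF).
   assert( combineSurrogates( 0xD834, 0xDD1E ) == 0x1D11E );
   // The first code point outside the BMP, U+10000, is the pair D800/DC00.
   assert( combineSurrogates( 0xD800, 0xDC00 ) == 0x10000 );
   return 0;
}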
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
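The limits above rely on unsigned wrap-around: UInt(-1) is all ones, dividing by two clears the top bit to give the largest signed value, and complementing that gives the bit pattern of the smallest. A short check of the derivation, assuming the usual 32-bit two's-complement int (the conversion back to Int is implementation-defined before C++20 but holds on mainstream platforms).

// Standalone check of the minInt/maxInt/maxUInt derivation; not jsoncpp code.
#include <cassert>
#include <climits>

int main()
{
   typedef int Int;
   typedef unsigned int UInt;

   const Int  minInt  = Int( ~( UInt(-1) / 2 ) );   // 0x80000000 pattern
   const Int  maxInt  = Int( UInt(-1) / 2 );        // 0x7FFFFFFF
   const UInt maxUInt = UInt(-1);                   // all ones

   assert( maxInt  == INT_MAX );
   assert( minInt  == INT_MIN );
   assert( maxUInt == UINT_MAX );
   return 0;
}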
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investigate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases will be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include <json/config.h> -# include <stdio.h> -# include <deque> -# include <string> - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion fails. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure.
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targeted at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque<Failure> Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs tests as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test cases in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template <typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true.
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include <json/json.h> -#include "jsontest.h" - - -// TODO: -// - boolean values report that they are integral. They should not. -// - unsigned integers in integer range are not considered to be valid integers. Should check range.
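For reference, here is roughly what one invocation of the JSONTEST_FIXTURE / JSONTEST_REGISTER_FIXTURE macros defined in the header above expands to. This is a hand-expanded sketch for illustration only, using the ValueTest fixture and the size test that appear in the file below; it is not part of the original patch.

// JSONTEST_FIXTURE( ValueTest, size ) { ...body... } expands to approximately:
class TestValueTestsize : public ValueTest
{
public:
   // Factory used by the Runner to instantiate the test case on demand.
   static JsonTest::TestCase *factory()
   {
      return new TestValueTestsize();
   }
public: /* overridden from TestCase */
   virtual const char *testName() const
   {
      return "ValueTest" "/" "size";   // adjacent literals concatenate to "ValueTest/size"
   }
   virtual void runTestCase();
};

void TestValueTestsize::runTestCase()
{
   // ...the body written after the macro invocation goes here...
}

JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ) then reduces to (runner).add( &TestValueTestsize::factory ).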
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 6f73ca4bfe9d2ffc25b92db501eb9236574cc4d7 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 22:46:35 +0000 Subject: [PATCH 126/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@126 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 344 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14497 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integers, real numbers, strings, an ordered sequence of +values, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON values and handle serialization +and unserialization to and from strings. + +It can also preserve existing comments across unserialization/serialization steps, +making it a convenient format for storing user input files. + +Unserialization parsing is user-friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +Python to be installed (http://www.python.org). + +You can download the scons-local distribution from the following URL: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Adding a platform is fairly simple: you need to change the SConstruct file +to do so. + +and TARGET may be: + check: build the library and run the unit tests. + + +* Running the tests manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the Python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file that contains the input document in JSON format. +- a TESTNAME.expected file that contains a flattened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element, separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represents the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element paths. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated alongside the input test files.
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, ordered sequences of values, and
+collections of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite JSON documents while preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout << "Failed to parse configuration\n"
+              << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins', return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )   // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, generate the new configuration document:
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
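[Editor's note] The \code fragment in jsoncpp.dox above omits the surrounding boilerplate. A complete, compilable round-trip sketch using only the API shown in this patch (Json::Value, Json::Reader, Json::StyledWriter, Json::FastWriter) might look as follows; it assumes the include/ directory from this import is on the compiler's include path and the library is linked, and the document literal and member names are purely illustrative:

#include <json/json.h>   // pulls in value.h, reader.h and writer.h (see include/json/json.h later in this patch)
#include <iostream>
#include <string>

int main()
{
   // Parse a small in-memory document; Reader::parse() returns false on malformed input.
   const std::string doc = "{ \"encoding\" : \"UTF-8\", \"indent\" : { \"length\" : 3 } }";
   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( doc, root ) )
   {
      std::cerr << reader.getFormatedErrorMessages();   // 0.5.0 spelling of the accessor
      return 1;
   }

   // Json::Value converts implicitly from bool, int, double, const char* and std::string.
   root["indent"]["use_space"] = true;

   // StyledWriter emits human-readable text, FastWriter a compact single line.
   Json::StyledWriter styled;
   Json::FastWriter fast;
   std::cout << styled.write( root );
   std::cout << fast.write( root );
   return 0;
}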
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox new file mode 100644 index 0000000..7f3aa1a --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox @@ -0,0 +1,32 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + - Release on sourceforge download + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - "Stream" based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py new file mode 100644 index 0000000..0a2a6c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doxybuild.py @@ -0,0 +1,167 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
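To make the behaviour described above concrete, a short usage sketch (illustrative only, based on the interface declared in this header): operator[] creates missing members and array elements, get() supplies defaults, and getMemberNames() lists object keys.
\code
#include <json/json.h>
#include <iostream>
#include <string>

void valueExample()
{
   Json::Value root( Json::objectValue );
   root["name"] = "jsoncpp";                      // member is created on first access
   root["version"] = 5;
   root["tags"] = Json::Value( Json::arrayValue );
   root["tags"].append( "json" );                 // same as root["tags"][root["tags"].size()] = "json"
   root["tags"][1u] = "parser";                   // unsigned index selects the array operator[]

   std::string name = root.get( "name", "unknown" ).asString();
   Json::Value::Members members = root.getMemberNames();
   for ( unsigned int i = 0; i < members.size(); ++i )
      std::cout << members[i] << std::endl;
}
\endcode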
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
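// Worked example of the look-up described above (illustrative only, using the
// itemsPerPage value of 8 declared just above): for itemIndex 21,
// pageIndex = 21 / 8 = 2 and the offset inside the page is 21 % 8 = 5,
// so the element is found at pages_[2][5].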
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
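A small iteration sketch (not part of the header; it relies only on Value::begin()/end() and the iterator members documented above), printing the members of an objectValue:
\code
#include <json/json.h>
#include <iostream>

void printMembers( const Json::Value &root )   // root is expected to be an objectValue
{
   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
      std::cout << it.key().asString() << ": " << *it << std::endl;
}
\endcode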
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
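The writers declared in this file can be combined as in the following sketch (illustrative only; it assumes the interfaces as declared here): FastWriter for compact machine-oriented output, StyledWriter for a formatted string, and StyledStreamWriter for formatted output written directly to a stream.
\code
#include <json/json.h>
#include <fstream>
#include <iostream>
#include <string>

void writeExamples( const Json::Value &root )
{
   Json::FastWriter fast;                         // single line, compact
   std::string compact = fast.write( root );

   Json::StyledWriter styled;                     // human friendly string
   std::string pretty = styled.write( root );

   Json::StyledStreamWriter streamWriter( "  " ); // two-space indentation
   std::ofstream out( "out.json" );
   streamWriter.write( out, root );

   std::cout << compact << pretty;                // or simply: std::cout << root;
}
\endcode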
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..80a2edb --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,344 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store', default=False, + help="""Skips build check.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' 
) + release_version = args[0] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentatio upload' + else: + print 'No upload user specified. Documentation was not upload.' + print 'Tarball can be found at:', doc_tarball_path + #@todo: + #upload source & doc tarballs + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. 
+ + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. +## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. 
+## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## +## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. 
+ If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. + """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... 
+##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. +else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? "" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, 
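
The test runner above flattens a parsed document into one "path=value" line per leaf, which is what the .expected and .actual files contain. Below is a minimal sketch of the same idea against the public API added in this patch; dumpFlat is a hypothetical helper, not part of the library, and scalar types other than strings and integers are glossed over.

    #include <cstdio>
    #include <string>
    #include <json/json.h>

    // Hypothetical helper: prints each leaf as "path=value", mimicking the
    // flattened dump the test runner writes to the .actual files.
    static void dumpFlat( const Json::Value &value, const std::string &path = "." )
    {
        if ( value.isObject() )
        {
            Json::Value::Members members = value.getMemberNames();
            std::string sep = *path.rbegin() == '.' ? "" : ".";
            for ( unsigned i = 0; i < members.size(); ++i )
                dumpFlat( value[ members[i] ], path + sep + members[i] );
        }
        else if ( value.isArray() )
        {
            for ( unsigned index = 0; index < value.size(); ++index )
            {
                char suffix[16];
                std::sprintf( suffix, "[%u]", index );
                dumpFlat( value[index], path + suffix );
            }
        }
        else if ( value.isString() )
            std::printf( "%s=\"%s\"\n", path.c_str(), value.asString().c_str() );
        else
            std::printf( "%s=%d\n", path.c_str(), value.asInt() ); // sketch: other scalars omitted
    }

    int main()
    {
        Json::Value root;
        root["name"] = "jsoncpp";
        root["sizes"][0u] = 640;
        root["sizes"][1u] = 480;
        dumpFlat( root );   // prints .name="jsoncpp", .sizes[0]=640, .sizes[1]=480 (one per line)
        return 0;
    }
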
const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
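
The arrayAllocator()/dummyArrayAllocatorInitializer pair above is a small idiom worth spelling out: the function-local statics give a lazily constructed default allocator plus a pointer that callers could later repoint, and the file-scope dummy object forces the first call during static initialisation, before main() runs. A stripped-down sketch of the same pattern with invented names:

    #include <cstdio>

    struct Allocator { Allocator() { std::printf( "allocator ready\n" ); } };

    // Returns a reference to a pointer so a custom allocator can be swapped in later.
    static Allocator *&currentAllocator()
    {
        static Allocator defaultAllocator;             // constructed on first call
        static Allocator *current = &defaultAllocator;
        return current;
    }

    // File-scope dummy: its constructor runs before main() and triggers the first call.
    static struct Bootstrap { Bootstrap() { currentAllocator(); } } bootstrap;

    int main() { return currentAllocator() != 0 ? 0 : 1; }
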
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
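
ValueInternalArray keeps values in fixed-size pages plus a separately reallocated index of page pointers, grown by the (count*3)/2 + 1 rule seen in reallocateArrayPageIndex. A self-contained sketch of the two pieces of arithmetic; the itemsPerPage value of 8 is used here only for illustration:

    #include <cstdio>

    int main()
    {
        const unsigned itemsPerPage = 8;                 // illustration only

        // Element index -> (page, offset), as used by resolveReference() and find().
        for ( unsigned index = 0; index < 24; index += 7 )
            std::printf( "index %2u -> page %u, offset %u\n",
                         index, index / itemsPerPage, index % itemsPerPage );

        // Growth of the page-index capacity: newCount = (count * 3) / 2 + 1.
        unsigned capacity = 0;
        for ( int step = 1; step <= 6; ++step )
        {
            capacity = ( capacity * 3 ) / 2 + 1;
            std::printf( "capacity after growth %d: %u\n", step, capacity );
        }
        return 0;                                        // capacities: 1 2 4 7 11 17
    }
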
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
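
doActualRemove above removes an entry by swapping it with the last occupied slot of the bucket's last link, so occupied slots stay contiguous at the cost of element ordering. The same trick on a plain array, as a self-contained sketch:

    #include <cstdio>

    int main()
    {
        int items[6] = { 10, 20, 30, 40, 50, 60 };
        unsigned used = 6;

        unsigned removeIndex = 1;                 // remove the value 20
        items[removeIndex] = items[used - 1];     // overwrite with the last used slot
        --used;                                   // the freed slot becomes "available"

        for ( unsigned i = 0; i < used; ++i )
            std::printf( "%d ", items[i] );       // 10 60 30 40 50 : order not preserved
        std::printf( "\n" );
        return 0;
    }
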
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
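
The hash is deliberately simple: it accumulates each key byte scaled by 37 and leaves the modulo-by-bucket-count reduction to the callers. A standalone check of what that yields for a few ASCII keys; the bucket count of 4 is only for illustration (the reserve() shown earlier never grows past one bucket, since the rehashing line is commented out):

    #include <cstdio>

    // Same accumulation as ValueInternalMap::hash() for ASCII keys.
    static unsigned hashKey( const char *key )
    {
        unsigned hash = 0;
        while ( *key )
            hash += static_cast<unsigned>( *key++ ) * 37;
        return hash;
    }

    int main()
    {
        const unsigned bucketCount = 4;           // illustration; the map above uses 1
        const char *keys[] = { "name", "size", "id" };
        for ( int i = 0; i < 3; ++i )
            std::printf( "%-4s -> hash %u -> bucket %u\n",
                         keys[i], hashKey( keys[i] ), hashKey( keys[i] ) % bucketCount );
        return 0;
    }
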
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
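
codePointToUTF8 above applies the standard UTF-8 length rules: one byte up to U+007F, two up to U+07FF, three up to U+FFFF and four up to U+10FFFF. A quick standalone check of the expected byte sequences for two common code points, using the same bit layout reduced to the one-, two- and three-byte cases:

    #include <cstdio>
    #include <string>

    static std::string encode( unsigned cp )
    {
        std::string out;
        if ( cp <= 0x7F )
            out += static_cast<char>( cp );
        else if ( cp <= 0x7FF )
        {
            out += static_cast<char>( 0xC0 | ( cp >> 6 ) );
            out += static_cast<char>( 0x80 | ( cp & 0x3F ) );
        }
        else if ( cp <= 0xFFFF )
        {
            out += static_cast<char>( 0xE0 | ( cp >> 12 ) );
            out += static_cast<char>( 0x80 | ( ( cp >> 6 ) & 0x3F ) );
            out += static_cast<char>( 0x80 | ( cp & 0x3F ) );
        }
        return out;
    }

    int main()
    {
        const unsigned points[] = { 0x00E9 /* e-acute */, 0x20AC /* euro sign */ };
        for ( int i = 0; i < 2; ++i )
        {
            std::string bytes = encode( points[i] );
            std::printf( "U+%04X ->", points[i] );
            for ( unsigned j = 0; j < bytes.size(); ++j )
                std::printf( " %02X", static_cast<unsigned char>( bytes[j] ) );
            std::printf( "\n" );   // expected: C3 A9 and E2 82 AC
        }
        return 0;
    }
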
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
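
For callers, the parse() overloads above are normally driven as follows. This is a minimal sketch: the two-argument call relies on collectComments being defaulted in the header, and getFormatedErrorMessages is the spelling this version of the library exports (it is also what the test runner earlier in this patch calls).

    #include <cstdio>
    #include <string>
    #include <json/json.h>

    int main()
    {
        const std::string document =
            "{ \"name\": \"jsoncpp\", \"released\": true, \"tests\": [1, 2, 3] }";

        // strictMode(): no comments allowed, root must be an array or object.
        Json::Reader reader( Json::Features::strictMode() );
        Json::Value root;

        if ( !reader.parse( document, root ) )
        {
            std::printf( "parse failed:\n%s", reader.getFormatedErrorMessages().c_str() );
            return 1;
        }
        std::printf( "name=%s tests=%u\n",
                     root["name"].asString().c_str(), root["tests"].size() );
        return 0;
    }
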
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location &current, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location &current, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
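+// Illustrative sketch: the default allocator above backs the string-valued Value
+// constructors defined further below (values shown are examples only).
+//
+//   Json::Value copied( "text" );                               // duplicated via duplicateStringValue()
+//   Json::Value aliased( Json::StaticString( "static text" ) ); // pointer kept as-is, never freed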
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
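+   // Illustrative sketch of the conversion rules implemented by asInt()/asDouble()/
+   // asBool()/isConvertibleTo() above (values are examples only).
+   //
+   //   Json::Value v( 42 );                      // intValue
+   //   v.asDouble();                             // 42.0
+   //   v.asBool();                               // true (non-zero)
+   //   v.isConvertibleTo( Json::uintValue );     // true, since the value is >= 0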
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
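+            // Illustrative sketch of typical Path use, assuming some existing
+            // Json::Value root (the path string and default value are examples only).
+            //
+            //   Json::Path path( ".settings.indent.length" );
+            //   Json::Value length = path.resolve( root, Json::Value( 3 ) ); // default if unresolved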
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From fb540f591c85db8fd03d72d85da80417953d1405 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 24 Feb 2010 23:08:47 +0000 Subject: [PATCH 127/268] - doc is now generated in dist/doxygen - makerelease now decompress the tarball, download and install scons, run scons check on provided platforms, decompress doc tarball and upload the doc on the project web git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@127 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doxybuild.py | 48 +++++----- trunk/jsoncpp/makerelease.py | 177 +++++++++++++++++++++++++++++++++-- 2 files changed, 194 insertions(+), 31 deletions(-) diff --git a/trunk/jsoncpp/doxybuild.py b/trunk/jsoncpp/doxybuild.py index 792bff7..0a2a6c7 100644 --- a/trunk/jsoncpp/doxybuild.py +++ b/trunk/jsoncpp/doxybuild.py @@ -8,22 +8,19 @@ import shutil from devtools import tarball -def find_program(filename): +def find_program(*filenames): """find a program in folders path_lst, and sets env[var] - @param env: environmentA - @param filename: name of the program to search for - @param path_list: list of directories to search for filename - @param var: environment value to be checked for in env or os.environ - @return: either the value that is referenced with [var] in env or os.environ - or the first occurrence filename or '' if filename could not be found + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found """ paths = os.environ.get('PATH', '').split(os.pathsep) suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path return '' def do_subst_in_file(targetfile, sourcefile, dict): @@ -87,36 +84,38 @@ def build_doc( options, make_release=False ): options.silent = True version = open('version','rt').read().strip() - output_dir = '../build/doxygen' # relative to doc/doxyfile location. + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) top_dir = os.path.abspath( '.' 
) html_output_dirname = 'jsoncpp-api-html-' + version tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) def yesno( bool ): return bool and 'YES' or 'NO' subst_keys = { '%JSONCPP_VERSION%': version, '%DOC_TOPDIR%': '', '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ), + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), '%HAVE_DOT%': yesno(options.with_dot), '%DOT_PATH%': os.path.split(options.dot_path)[0], '%HTML_HELP%': yesno(options.with_html_help), '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': warning_log_path + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) } - full_output_dir = os.path.join( 'doc', output_dir ) - if os.path.isdir( full_output_dir ): - print 'Deleting directory:', full_output_dir - shutil.rmtree( full_output_dir ) - if not os.path.isdir( full_output_dir ): - os.makedirs( full_output_dir ) + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) if not options.silent: - print open(os.path.join('doc', warning_log_path), 'rb').read() + print open(warning_log_path, 'rb').read() index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) print 'Generated documentation can be found in:' print index_path @@ -126,12 +125,13 @@ def yesno( bool ): if options.make_tarball: print 'Generating doc tarball to', tarball_path tarball_sources = [ - full_output_dir, + output_dir, 'README.txt', 'version' ] - tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) + tarball_basedir = os.path.join( output_dir, html_output_dirname ) tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname def main(): usage = """%prog diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index 9dcdcf6..80a2edb 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -3,7 +3,7 @@ Requires Python 2.6 Example of invocation (use to test the script): -python makerelease.py --force --retag 0.5.0 0.6.0-dev +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev Example of invocation when doing a release: python makerelease.py 0.5.0 0.6.0-dev @@ -15,15 +15,25 @@ import subprocess import xml.etree.ElementTree as ElementTree import shutil +import urllib2 +import tempfile +import os +import time from devtools import antglob, fixeol, tarball SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' def set_version( version ): with open('version','wb') as f: f.write( version.strip() ) +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + class SVNError(Exception): pass @@ -89,8 +99,7 @@ def svn_export( tag_url, export_dir ): Target directory, including its parent is created if it does not exist. 
If the directory export_dir exist, it is deleted before export proceed. """ - if os.path.isdir( export_dir ): - shutil.rmtree( export_dir ) + rmdir_if_exist( export_dir ) svn_command( 'export', tag_url, export_dir ) def fix_sources_eol( dist_dir ): @@ -111,6 +120,114 @@ def fix_sources_eol( dist_dir ): for path in unix_sources: fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + + def main(): usage = """%prog release_version next_dev_version Update 'version' file to release_version and commit. @@ -120,7 +237,9 @@ def main(): Performs an svn export of tag release version, and build a source tarball. -Must be started in the project top directory. +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. """ from optparse import OptionParser parser = OptionParser(usage=usage) @@ -133,6 +252,14 @@ def main(): help="""Ignore pending commit. [Default: %default]""") parser.add_option('--retag', dest="retag_release", action='store_true', default=False, help="""Overwrite release existing tag if it exist. [Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store', default=False, + help="""Skips build check.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") parser.enable_interspersed_args() options, args = parser.parse_args() @@ -140,6 +267,9 @@ def main(): parser.error( 'release_version missing on command-line.' ) release_version = args[0] + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + if options.ignore_pending_commit: msg = '' else: @@ -157,7 +287,12 @@ def main(): svn_tag_sandbox( tag_url, 'Release ' + release_version ) print 'Generated doxygen document...' 
- doxybuild.build_doc( options, make_release=True ) +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) export_dir = 'dist/export' svn_export( tag_url, export_dir ) @@ -168,12 +303,40 @@ def main(): print 'Generating source tarball to', source_tarball_path tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + # Decompress source tarball, download and install scons-local distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentatio upload' + else: + print 'No upload user specified. Documentation was not upload.' 
+ print 'Tarball can be found at:', doc_tarball_path #@todo: - # ?compile & run & check - # ?upload documentation + #upload source & doc tarballs else: sys.stderr.write( msg + '\n' ) From 7bebdc85236550ab27a784ea51134b8ed2fda397 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 25 Feb 2010 07:45:14 +0000 Subject: [PATCH 128/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@128 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 167 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 344 --- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - 
.../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - 
.../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14497 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py 
delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json 
delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. 
- -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. 
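For illustration, a minimal sketch of the flattening described in the two sections above; this is not the actual jsontest tool, and the helper name flatten, the exact path joining and the numeric formatting are assumptions made for the example:

   #include <json/json.h>
   #include <cstdio>
   #include <string>

   // Prints one "path=value" line per node of a parsed document,
   // in the spirit of the TESTNAME.expected format.
   static void flatten( const Json::Value &value, const std::string &path )
   {
      switch ( value.type() )
      {
      case Json::nullValue:
         std::printf( "%s=null\n", path.c_str() );
         break;
      case Json::intValue:
         std::printf( "%s=%d\n", path.c_str(), value.asInt() );
         break;
      case Json::uintValue:
         std::printf( "%s=%u\n", path.c_str(), value.asUInt() );
         break;
      case Json::realValue:
         std::printf( "%s=%.16g\n", path.c_str(), value.asDouble() );
         break;
      case Json::stringValue:
         std::printf( "%s=\"%s\"\n", path.c_str(), value.asString().c_str() );
         break;
      case Json::booleanValue:
         std::printf( "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" );
         break;
      case Json::arrayValue:
      {
         std::printf( "%s=[]\n", path.c_str() );    // array values are shown empty
         char suffix[32];
         for ( Json::UInt index = 0; index < value.size(); ++index )
         {
            std::sprintf( suffix, "[%u]", index );
            flatten( value[index], path + suffix );
         }
         break;
      }
      case Json::objectValue:
      {
         std::printf( "%s={}\n", path.c_str() );    // object values are shown empty
         Json::Value::Members members = value.getMemberNames();
         for ( Json::Value::Members::const_iterator it = members.begin();
               it != members.end(); ++it )
            flatten( value[*it], path == "." ? path + *it : path + "." + *it );
         break;
      }
      default:
         break;
      }
   }

   // Usage: after parsing a document into root, call flatten( root, "." ).

The real tool also produces the output files described next (.actual, .rewrite, .actual-rewrite); this sketch only covers the flattening step.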
-- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. 
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- [footer.html table markup lost in extraction; remaining text: "SourceForge Logo", "hosts this site.", "Send comments to: Json-cpp Developers"]
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- [header.html table markup lost in extraction; remaining text: "JsonCpp project page", "JsonCpp home page"]
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
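The deleted mainpage above walks through the typical read/modify/write cycle in fragments. A minimal, self-contained sketch of the same cycle is given below; the config_doc string, the main() wrapper and the hard-coded values are illustrative assumptions only, and it presumes the headers are reachable as json/json.h, but every call used (Reader::parse, getFormatedErrorMessages, Value::get, operator[], StyledWriter::write) appears in the documentation quoted above.

#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Illustrative document; C++-style comments are accepted by the default Reader.
   const std::string config_doc =
      "// Configuration options\n"
      "{ \"encoding\" : \"UTF-8\",\n"
      "  \"indent\" : { \"length\" : 3, \"use_space\" : true } }";

   Json::Value root;   // will hold the root value after parsing
   Json::Reader reader;
   if ( !reader.parse( config_doc, root ) )
   {
      // Report the failure and its location in the document.
      std::cerr << reader.getFormatedErrorMessages();
      return 1;
   }

   // Query with a default, then modify the tree in place.
   std::string encoding = root.get( "encoding", "UTF-8" ).asString();
   root["indent"]["length"] = 4;

   // Serialize back to text; comments collected during parsing are preserved.
   Json::StyledWriter writer;
   std::cout << "encoding: " << encoding << "\n" << writer.write( root );
   return 0;
}

Built against lib_json, this should print the styled document with the leading comment retained.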
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 0a2a6c7..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
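A short, purely illustrative sketch of the array helpers declared above — resize(), append(), isValidIndex() and the index form of get(); the include path is again an assumption:

    #include <json/value.h>
    #include <string>

    void arraySketch()
    {
        Json::Value list( Json::arrayValue );
        list.resize( 2 );                         // [ null, null ]
        list[0u] = true;
        list.append( "two" );                     // same as list[ list.size() ] = "two"
        if ( list.isValidIndex( 2u ) )
        {
            std::string last = list[2u].asString();   // "two"
            (void)last;
        }
        Json::Value missing = list.get( 9u, Json::Value::null );  // index out of range, default returned
        (void)missing;
    }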
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
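To make the look-up rule quoted in the comment above concrete, a tiny standalone sketch of the arithmetic for an assumed itemIndex of 21 (itemsPerPage is 8, as declared just above); this is an illustration of the computation, not library code:

    // Locating item 21 when itemsPerPage is 8:
    const unsigned int itemsPerPage = 8;
    const unsigned int itemIndex    = 21;
    const unsigned int pageIndex    = itemIndex / itemsPerPage;   // 2
    const unsigned int offset       = itemIndex % itemsPerPage;   // 5
    // The element therefore lives at pages_[2][5] in the notation of the comment above.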
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
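As a rough sketch of how the two styled writers described above might be used; the Value being written and the two-space indentation string are assumptions for illustration:

    #include <json/value.h>
    #include <json/writer.h>
    #include <iostream>

    void writeSketch( const Json::Value &root )
    {
        Json::StyledWriter styled;
        std::cout << styled.write( root );          // indented, human-readable output as a string

        Json::StyledStreamWriter streamed( "  " );  // two spaces of indentation per level
        streamed.write( std::cout, root );          // writes directly to the stream
    }

The operator<<( std::ostream&, const Value& ) declared at the end of this header uses the StyledStreamWriter, so "out << root" is an equivalent shorthand for the second form.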
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index 80a2edb..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,344 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. 
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. [Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store', default=False, - help="""Skips build check.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' 
) - release_version = args[0] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentatio upload' - else: - print 'No upload user specified. Documentation was not upload.' - print 'Tarball can be found at:', doc_tarball_path - #@todo: - #upload source & doc tarballs - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. 
- - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. -## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. 
-## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## -## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. 
- If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. - """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... 
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. -else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." 
) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? "" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, 
const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
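The BatchAllocator comment above stresses that allocate() only hands back raw, correctly sized storage: the caller must construct the object with placement new and later run its destructor explicitly, which is exactly what the DefaultValueArrayAllocator implementations above do with arraysAllocator_ and pagesAllocator_. Below is a tiny self-contained sketch of that discipline; the Node type is hypothetical and plain malloc stands in for a batch page.

    #include <cstdio>
    #include <cstdlib>
    #include <new>

    struct Node
    {
       Node( int v ) : value( v ) {}
       int value;
    };

    int main()
    {
       void *raw = std::malloc( sizeof(Node) );   // raw storage, as allocate() returns
       if ( !raw )
          return 1;
       Node *node = new ( raw ) Node( 42 );       // construct in place with placement new
       std::printf( "%d\n", node->value );
       node->~Node();                             // caller destroys the object explicitly...
       std::free( raw );                          // ...before the storage is released or reused
       return 0;
    }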
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
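For reference, codePointToUTF8 above follows the standard UTF-8 layout: one byte up to U+007F, two bytes up to U+07FF, three bytes up to U+FFFF, four bytes up to U+10FFFF, with each continuation byte carrying six payload bits (10xxxxxx). The following standalone sketch (independent of the patch) applies the same bit arithmetic and prints the encoded bytes for a few well-known sample code points.

    #include <cstdio>
    #include <string>

    // Same branching as the reader's codePointToUTF8: the leading byte
    // selects the length, continuation bytes are 0x80 | six payload bits.
    static std::string toUTF8( unsigned int cp )
    {
       std::string out;
       if ( cp <= 0x7F )
       {
          out += static_cast<char>( cp );
       }
       else if ( cp <= 0x7FF )
       {
          out += static_cast<char>( 0xC0 | (cp >> 6) );
          out += static_cast<char>( 0x80 | (cp & 0x3F) );
       }
       else if ( cp <= 0xFFFF )
       {
          out += static_cast<char>( 0xE0 | (cp >> 12) );
          out += static_cast<char>( 0x80 | ((cp >> 6) & 0x3F) );
          out += static_cast<char>( 0x80 | (cp & 0x3F) );
       }
       else if ( cp <= 0x10FFFF )
       {
          out += static_cast<char>( 0xF0 | (cp >> 18) );
          out += static_cast<char>( 0x80 | ((cp >> 12) & 0x3F) );
          out += static_cast<char>( 0x80 | ((cp >> 6) & 0x3F) );
          out += static_cast<char>( 0x80 | (cp & 0x3F) );
       }
       return out;
    }

    int main()
    {
       // Expected: U+00A2 -> C2 A2, U+20AC -> E2 82 AC, U+10348 -> F0 90 8D 88
       const unsigned int samples[] = { 0xA2, 0x20AC, 0x10348 };
       for ( int i = 0; i < 3; ++i )
       {
          std::string bytes = toUTF8( samples[i] );
          std::printf( "U+%04X:", samples[i] );
          for ( std::string::size_type j = 0; j < bytes.size(); ++j )
             std::printf( " %02X", static_cast<unsigned char>( bytes[j] ) );
          std::printf( "\n" );
       }
       return 0;
    }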
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
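The minInt/maxInt/maxUInt definitions above lean on unsigned arithmetic: UInt(-1) is the all-ones bit pattern, halving it yields the largest positive signed value, and complementing that leaves only the sign bit, i.e. the most negative signed value. A quick standalone check of that arithmetic, assuming the usual 32-bit two's-complement int/unsigned int that the library targets here:

    #include <cassert>
    #include <cstdio>

    int main()
    {
       typedef int Int;
       typedef unsigned int UInt;

       const UInt maxUInt = UInt(-1);              // 0xFFFFFFFF = 4294967295
       const Int  maxInt  = Int( UInt(-1)/2 );     // 0x7FFFFFFF = 2147483647
       const Int  minInt  = Int( ~(UInt(-1)/2) );  // 0x80000000 = -2147483648 on two's complement

       assert( maxUInt == 4294967295u );
       assert( maxInt  == 2147483647 );
       assert( minInt  == -maxInt - 1 );
       std::printf( "maxUInt=%u maxInt=%d minInt=%d\n", maxUInt, maxInt, minInt );
       return 0;
    }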
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
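// The as*() accessors above convert across numeric types but assert (via
// JSON_ASSERT_MESSAGE, i.e. std::runtime_error here) when a value cannot be
// represented in the requested type. A minimal sketch of that behaviour,
// assuming json/value.h; the literals are illustrative only.
#include <json/value.h>
#include <cassert>

void conversionSketch()
{
   Json::Value real( 3.75 );
   assert( real.asInt() == 3 );       // in-range real is truncated by Int( value_.real_ )
   assert( real.asBool() );           // any non-zero real converts to true

   Json::Value count( 7u );
   assert( count.asDouble() == 7.0 ); // uint widens losslessly to double

   Json::Value missing;               // nullValue
   assert( missing.asInt() == 0 );    // null converts to 0 / 0.0 / "" / false
   // Json::Value( 4000000000u ).asInt() would throw: uint above maxInt.
}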
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
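// makePath() above gives Path a tiny query syntax: '.' separates object keys,
// "[N]" indexes arrays, and '%' / "[%]" splice in the PathArgument values
// supplied to the constructor. A minimal usage sketch, assuming json/value.h
// (where Path and PathArgument are declared, with the unused PathArgument
// parameters defaulted); "book", "authors" and `root` are placeholders.
#include <json/value.h>
#include <string>

std::string firstAuthor( const Json::Value &root )
{
   // Equivalent to root["book"]["authors"][0].
   Json::Path path( ".book.authors[0]" );
   // The two-argument resolve() returns the supplied default when the path
   // cannot be matched against the document.
   return path.resolve( root, "unknown" ).asString();
}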
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
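// The iterators above expose both a position's key and its value: key()
// returns the member name (or array index) as a Value, memberName() the raw
// C string for object members, and index() the array index. A minimal sketch
// that prints an object's members, assuming json/value.h; `object` is a
// placeholder.
#include <json/value.h>
#include <iostream>

void printMembers( const Json::Value &object )
{
   for ( Json::Value::const_iterator it = object.begin(); it != object.end(); ++it )
   {
      // *it is the member's value; toStyledString() is defined earlier in json_value.cpp.
      std::cout << it.memberName() << " : " << (*it).toStyledString();
   }
}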
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
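// The writers above differ only in layout: FastWriter emits one compact line,
// StyledWriter returns an indented string (and reproduces any comments
// attached to the values), and StyledStreamWriter streams the same styled
// form. A minimal sketch, assuming json/value.h and json/writer.h; `root` is
// a placeholder.
#include <json/value.h>
#include <json/writer.h>
#include <iostream>
#include <string>

void emitAll( const Json::Value &root )
{
   Json::FastWriter fast;
   std::string compact = fast.write( root );      // single line, trailing '\n'

   Json::StyledWriter styled;
   std::string pretty = styled.write( root );     // multi-line, 3-space indent

   Json::StyledStreamWriter streamWriter( "   " ); // indentation passed explicitly
   streamWriter.write( std::cout, root );          // same layout, straight to a stream
}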
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
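// json_writer.cpp closes by defining operator<< on std::ostream (styled
// output), matching the operator>> at the end of json_reader.cpp that throws
// std::runtime_error on malformed input. A minimal round-trip sketch,
// assuming the umbrella header json/json.h; the file names are placeholders.
#include <json/json.h>
#include <fstream>

void roundTrip()
{
   Json::Value root;
   std::ifstream in( "input.json" );
   in >> root;                        // Reader::parse, throws on failure

   root["generated"] = true;          // boolean member added for illustration
   std::ofstream out( "output.json" );
   out << root;                       // StyledStreamWriter with default indentation
}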
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
-   TestResult &addFailure( const char *file, unsigned int line,
-                           const char *expr = 0 );
-
-   /// Removes the last PredicateContext added to the predicate stack
-   /// chained list.
-   /// Next messages will be targeted at the PredicateContext that was removed.
-   TestResult &popPredicateContext();
-
-   bool failed() const;
-
-   void printFailure( bool printTestName ) const;
-
-   TestResult &operator << ( bool value );
-   TestResult &operator << ( int value );
-   TestResult &operator << ( unsigned int value );
-   TestResult &operator << ( double value );
-   TestResult &operator << ( const char *value );
-   TestResult &operator << ( const std::string &value );
-
-private:
-   TestResult &addToLastFailure( const std::string &message );
-   unsigned int getAssertionNestingLevel() const;
-   /// Adds a failure or a predicate context
-   void addFailureInfo( const char *file, unsigned int line,
-                        const char *expr, unsigned int nestingLevel );
-   static std::string indentText( const std::string &text,
-                                  const std::string &indent );
-
-   typedef std::deque<Failure> Failures;
-   Failures failures_;
-   std::string name_;
-   PredicateContext rootPredicateNode_;
-   PredicateContext::Id lastUsedPredicateId_;
-   /// Failure which is the target of the messages added using operator <<
-   Failure *messageTarget_;
-};
-
-
-class TestCase
-{
-public:
-   TestCase();
-
-   virtual ~TestCase();
-
-   void run( TestResult &result );
-
-   virtual const char *testName() const = 0;
-
-protected:
-   TestResult *result_;
-
-private:
-   virtual void runTestCase() = 0;
-};
-
-/// Function pointer type for TestCase factory
-typedef TestCase *(*TestCaseFactory)();
-
-class Runner
-{
-public:
-   Runner();
-
-   /// Adds a test to the suite
-   Runner &add( TestCaseFactory factory );
-
-   /// Runs tests as specified on the command-line.
-   /// If no command-line arguments are provided, run all tests.
-   /// If --list-tests is provided, then print the list of all test cases.
-   /// If --test <testname> is provided, then run the test named <testname>.
-   int runCommandLine( int argc, const char *argv[] ) const;
-
-   /// Runs all the test cases
-   bool runAllTest( bool printSummary ) const;
-
-   /// Returns the number of test cases in the suite
-   unsigned int testCount() const;
-
-   /// Returns the name of the test case at the specified index
-   std::string testNameAt( unsigned int index ) const;
-
-   /// Runs the test case at the specified index using the specified TestResult
-   void runTestAt( unsigned int index, TestResult &result ) const;
-
-   static void printUsage( const char *appName );
-
-private: // prevents copy construction and assignment
-   Runner( const Runner &other );
-   Runner &operator =( const Runner &other );
-
-private:
-   void listTests() const;
-   bool testIndex( const std::string &testName, unsigned int &index ) const;
-   static void preventDialogOnCrash();
-
-private:
-   typedef std::deque<TestCaseFactory> Factories;
-   Factories tests_;
-};
-
-template <typename T>
-TestResult &
-checkEqual( TestResult &result, const T &expected, const T &actual,
-            const char *file, unsigned int line, const char *expr )
-{
-   if ( expected != actual )
-   {
-      result.addFailure( file, line, expr );
-      result << "Expected: " << expected << "\n";
-      result << "Actual : " << actual;
-   }
-   return result;
-}
-
-TestResult &
-checkStringEqual( TestResult &result,
-                  const std::string &expected, const std::string &actual,
-                  const char *file, unsigned int line, const char *expr );
-
-} // namespace JsonTest
-
-
-/// \brief Asserts that the given expression is true.
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
-/// JSONTEST_ASSERT( x == y );
-#define JSONTEST_ASSERT( expr ) \
-   if ( expr ) \
-   { \
-   } \
-   else \
-      result_->addFailure( __FILE__, __LINE__, #expr )
-
-/// \brief Asserts that the given predicate is true.
-/// The predicate may do other assertions and be a member function of the fixture.
-#define JSONTEST_ASSERT_PRED( expr ) \
-   { \
-      JsonTest::PredicateContext _minitest_Context = { \
-         result_->predicateId_, __FILE__, __LINE__, #expr }; \
-      result_->predicateStackTail_->next_ = &_minitest_Context; \
-      result_->predicateId_ += 1; \
-      result_->predicateStackTail_ = &_minitest_Context; \
-      (expr); \
-      result_->popPredicateContext(); \
-   } \
-   *result_
-
-/// \brief Asserts that two values are equal.
-#define JSONTEST_ASSERT_EQUAL( expected, actual ) \
-   JsonTest::checkEqual( *result_, expected, actual, \
-                         __FILE__, __LINE__, \
-                         #expected " == " #actual )
-
-/// \brief Asserts that two strings are equal.
-#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
-   JsonTest::checkStringEqual( *result_, \
-                               std::string(expected), std::string(actual), \
-                               __FILE__, __LINE__, \
-                               #expected " == " #actual )
-
-/// \brief Begin a fixture test case.
-#define JSONTEST_FIXTURE( FixtureType, name ) \
-   class Test##FixtureType##name : public FixtureType \
-   { \
-   public: \
-      static JsonTest::TestCase *factory() \
-      { \
-         return new Test##FixtureType##name(); \
-      } \
-   public: /* overridden from TestCase */ \
-      virtual const char *testName() const \
-      { \
-         return #FixtureType "/" #name; \
-      } \
-      virtual void runTestCase(); \
-   }; \
-   \
-   void Test##FixtureType##name::runTestCase()
-
-#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
-   &Test##FixtureType##name::factory
-
-#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
-   (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
-
-#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
deleted file mode 100644
index b80776d..0000000
--- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
+++ /dev/null
@@ -1,244 +0,0 @@
-#include <json/json.h>
-#include "jsontest.h"
-
-
-// TODO:
-// - boolean values report that they are integral. Should not be.
-// - unsigned integers within the signed integer range are not considered valid integers. Should check range.
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '<File "%s" is missing: %s>' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
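# Usage sketch (the executable path below is a placeholder): running
#   python rununittests.py --valgrind bin/test_lib_json
# makes TestProxy above invoke "test_lib_json --test-auto --list-tests" (under valgrind)
# to enumerate the unit tests, then "test_lib_json --test-auto --test <name>" once per
# listed test, printing OK or FAILED for each and a pass/fail summary at the end.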
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 34e83ca73d25cad4cf802d256cbca72d484dee46 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 25 Feb 2010 07:51:43 +0000 Subject: [PATCH 129/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@129 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 359 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14512 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. 
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
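# As a usage sketch (the call below is hypothetical, purely for illustration):
#   antglob.glob('src', includes='**/*.cpp **/*.h **/*.inl', entry_type=antglob.FILE)
# would return the files under src/ matching those Ant-style patterns while skipping the
# default_excludes entries; see ant_pattern_to_re below for the matching conventions
# (e.g. '**/*.py' matches both 'script.py' and 'a/b/script.py').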
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
[footer.html markup lost in extraction; the surviving text is: "SourceForge Logo", "hosts this site.", and "Send comments to: Json-cpp Developers".]
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html
new file mode 100644
index 0000000..d56ea59
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/header.html
@@ -0,0 +1,24 @@
[header.html markup lost in extraction; the surviving text is: the page title "JsonCpp - JSON data format manipulation library" and the links "JsonCpp project page" and "JsonCpp home page".]
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, ordered sequences of values, and
+collections of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite JSON documents, preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout << "Failed to parse configuration\n"
+              << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, to make the new configuration document:
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
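The doxyfile earlier in this patch sets JAVADOC_AUTOBRIEF to YES and defines a \json_ref alias, so a JsonCpp header comment written in the style sketched below (an illustrative example only; the function name is hypothetical and not taken from the sources) would have its first sentence used as the brief description and the alias expanded to "JSON (JavaScript Object Notation)":

    /** Parses a \json_ref document held in a std::string.
     *  Everything after the first sentence becomes the detailed description;
     *  no explicit @brief command is needed because JAVADOC_AUTOBRIEF is YES.
     */
    bool parseDocument( const std::string &document );
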
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox new file mode 100644 index 0000000..7f3aa1a --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox @@ -0,0 +1,32 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + - Release on sourceforge download + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - "Stream" based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py new file mode 100644 index 0000000..0a2a6c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doxybuild.py @@ -0,0 +1,167 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
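+      // Example: with itemsPerPage == 8, itemIndex 13 maps to
+      // pageIndex = 13 / 8 = 1 and offset = 13 % 8 = 5, i.e. pages_[1][5].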
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..fe0014f --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,359 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
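[Editor's note] The fix_sources_eol() step that begins above delegates the actual rewriting to fixeol (imported from devtools earlier in this script), and that module is not part of this patch. As a rough sketch of what such an EOL rewrite amounts to, in the same Python 2 style as the rest of the script and with a reduced signature (the real helper also takes is_dry_run and verbose flags):

def fix_source_eol(path, eol='\n'):
    # Illustrative only: rewrite `path` so that every line ends with `eol`.
    f = open(path, 'rb')
    data = f.read()
    f.close()
    data = data.replace('\r\n', '\n').replace('\r', '\n')  # collapse CRLF / lone CR to LF
    if eol != '\n':
        data = data.replace('\n', eol)                      # then apply the requested EOL
    f = open(path, 'wb')
    f.write(data)
    f.close()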
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' 
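[Editor's note] main() above relies on devtools.tarball for make_tarball() and decompress(), another module that is not included in this patch. A simplified stand-in built on the standard tarfile module, mainly to show what the prefix_dir argument achieves (signatures reduced for illustration; the real make_tarball takes a list of sources and a base directory):

import tarfile

def make_tarball(tarball_path, source_dir, prefix_dir):
    # Pack source_dir so every entry in the archive is rooted at prefix_dir/.
    tar = tarfile.open(tarball_path, 'w:gz')
    try:
        tar.add(source_dir, arcname=prefix_dir)
    finally:
        tar.close()

def decompress(tarball_path, target_dir):
    # Extract the whole archive below target_dir.
    tar = tarfile.open(tarball_path, 'r:gz')
    try:
        tar.extractall(target_dir)
    finally:
        tar.close()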
+ svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Tarball uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + #@todo: + #upload source & doc tarballs + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
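[Editor's note] ValueInternalArray stores its elements in fixed-size pages addressed through the pages_ index, so an ArrayIndex is split into a page number and a slot with / and % itemsPerPage, as in makeIterator() above and resolveReference()/find() further below. A small Python illustration of that mapping; itemsPerPage is assumed to be 8 here, the real constant is declared in the value header, which is outside this hunk:

ITEMS_PER_PAGE = 8   # assumed value, for illustration only

def locate(index):
    # Mirrors pages_[index / itemsPerPage][index % itemsPerPage]
    return index // ITEMS_PER_PAGE, index % ITEMS_PER_PAGE

print locate(0)    # (0, 0)  -> first slot of the first page
print locate(9)    # (1, 1)  -> second slot of the second page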
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
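The valueAllocator() helper above follows a small pattern worth spelling out: a function-local static built on first use, returned by reference so a different allocator could be installed later, with a dummy namespace-scope object forcing construction before main(). A self-contained sketch of the same pattern (names are illustrative, not taken from the patch):

#include <cstdio>

struct Allocator
{
   Allocator() { std::puts( "default allocator constructed" ); }
};

static Allocator *&allocator()
{
   static Allocator defaultAllocator;             // created on first call
   static Allocator *current = &defaultAllocator;
   return current;                                // returned by reference: can be repointed
}

static struct AllocatorInitializer
{
   AllocatorInitializer() { allocator(); }        // force construction before main()
} allocatorInitializer;

int main()
{
   std::printf( "allocator at %p\n", static_cast<void *>( allocator() ) );
   return 0;
}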
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
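The valueToString overloads above and valueToQuotedString (whose escaping logic continues below) are also declared in json/writer.h, assuming this release's public headers, so they can be exercised directly. Sample inputs are arbitrary; expected output is noted in the trailing comments:

    #include <json/writer.h>
    #include <cstdio>

    int conversionDemo()
    {
       std::printf( "%s\n", Json::valueToString( Json::Int( -17 ) ).c_str() );    // -17
       std::printf( "%s\n", Json::valueToString( true ).c_str() );                // true
       std::printf( "%s\n", Json::valueToQuotedString( "tab\there" ).c_str() );   // "tab\there"
       return 0;
    }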
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
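This file provides three writers: FastWriter emits a compact single line, StyledWriter returns an indented string and re-emits comments attached to values, and StyledStreamWriter (begun just above) does the same directly to a std::ostream. A usage sketch with an invented document:

    #include <json/writer.h>
    #include <iostream>

    void writeExamples()
    {
       Json::Value root;
       root["name"] = "demo";
       root["sizes"].append( 1 );
       root["sizes"].append( 2 );

       Json::FastWriter fast;
       std::cout << fast.write( root );          // {"name":"demo","sizes":[1,2]}

       Json::StyledWriter styled;
       std::cout << styled.write( root );        // indented over several lines

       Json::StyledStreamWriter stream( "  " );  // indentation chosen by the caller
       stream.write( std::cout, root );
    }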
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
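Comments attached to values with Value::setComment() (defined in json_value.cpp earlier in this patch) are reproduced by the styled writers through writeCommentBeforeValue() and writeCommentAfterValueOnSameLine(). A small sketch; the field name and comment text are invented:

    #include <json/json.h>
    #include <iostream>

    void commentExample()
    {
       Json::Value root;
       root["retries"] = 3;
       // The text is stored verbatim, so include the comment markers yourself.
       root["retries"].setComment( "// maximum number of attempts", Json::commentBefore );
       std::cout << Json::StyledWriter().write( root );
    }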
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
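Putting the mini framework together: a fixture struct derives from JsonTest::TestCase, each JSONTEST_FIXTURE defines one named test on it, and JSONTEST_REGISTER_FIXTURE adds it to a Runner. A hypothetical fixture for illustration (DemoTest and its check are invented; the library's real suite follows below):

    #include <json/value.h>
    #include "jsontest.h"

    struct DemoTest : JsonTest::TestCase
    {
       Json::Value four_;
       DemoTest() : four_( 4 ) {}
    };

    JSONTEST_FIXTURE( DemoTest, asInt )
    {
       JSONTEST_ASSERT_EQUAL( 4, four_.asInt() );
    }

    int runDemo( int argc, const char *argv[] )
    {
       JsonTest::Runner runner;
       JSONTEST_REGISTER_FIXTURE( runner, DemoTest, asInt );
       return runner.runCommandLine( argc, argv );  // also understands --list-tests and --test NAME
    }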
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 3931c6b78f51e40da723565bb96b8a5a3e507ba0 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 25 Feb 2010 07:53:45 +0000 Subject: [PATCH 130/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@130 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 167 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 359 --- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 
5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - 
.../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14512 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
-It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. 
-Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. 
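As a usage sketch of the glob() entry point defined below in this antglob module (assumptions: Python 2, run from the project top directory with devtools/ importable; the include patterns are borrowed from the commented-out helper in devtools/fixeol.py):

    from devtools import antglob

    # Collect every C++ source below src/; SCM directories (.svn, CVS, ...)
    # are pruned by the default prune_dirs/excludes of this module.
    cpp_sources = antglob.glob( 'src',
                                includes = '**/*.cpp **/*.h **/*.inl' )

    # Widen the entry_type mask to report matching directories as well as files.
    entries = antglob.glob( 'test',
                            includes = '**/data',
                            entry_type = antglob.FILE | antglob.DIR )

    for path in cpp_sources:
        print path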
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
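Stepping back to the devtools/tarball.py helpers removed above, a minimal usage sketch could look like this (the archive name and prefix are hypothetical, chosen only to mirror the jsoncpp-src-<version> naming used by the SConstruct above; Python 2, run from the project top directory):

    from devtools import tarball

    # Pack a few top-level entries, storing them under a common prefix directory
    # inside the archive (base_dir '.' is stripped from the stored paths).
    tarball.make_tarball( 'jsoncpp-src-0.5.0.tar.gz',
                          sources = [ 'README.txt', 'include', 'src' ],
                          base_dir = '.',
                          prefix_dir = 'jsoncpp-src-0.5.0' )

    # Unpack the same archive into a scratch directory.
    tarball.decompress( 'jsoncpp-src-0.5.0.tar.gz', 'unpacked' )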
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- rewrite JSON document preserving original comments - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instruction are located in the file -README.txt in the top-directory of the project. - -Permanent link to the lastest revision of the file in subversion: -lastest README.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). 
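A minimal, hedged sketch of how the strict-mode parsing path declared in features.h and reader.h later in this diff would be used, assuming the deleted headers are installed on the include path as <json/json.h>; the input string and exit codes here are purely illustrative, not taken from the patch:

    #include <iostream>
    #include <string>
    #include <json/json.h>

    int main()
    {
       // Strict mode: comments are forbidden and the root must be an array
       // or an object (see Features::strictMode() in include/json/features.h).
       Json::Reader reader( Json::Features::strictMode() );
       Json::Value root;
       const std::string doc = "{ \"encoding\" : \"UTF-8\" }";   // hypothetical document
       if ( !reader.parse( doc, root, false ) )   // collectComments is ignored when comments are disallowed
       {
          std::cout << reader.getFormatedErrorMessages();        // historical spelling, as declared in reader.h
          return 1;
       }
       std::cout << root.get( "encoding", "UTF-8" ).asString() << std::endl;
       return 0;
    }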
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 0a2a6c7..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
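// [Editor's illustration -- not part of the original patch.] The class comment above
// describes ValueInternalArray's page-based lookup: an item index is split into a page
// index (itemIndex / itemsPerPage) and an offset within that page (itemIndex % itemsPerPage).
// A minimal sketch of that computation, using illustrative names (not the library's API)
// and plain ints as stand-ins for Value:
//
//    enum { kItemsPerPage = 8 };                           // mirrors itemsPerPage above
//
//    int &lookupItem( int **pages, unsigned itemIndex )
//    {
//       unsigned pageIndex   = itemIndex / kItemsPerPage;  // which page holds the item
//       unsigned indexInPage = itemIndex % kItemsPerPage;  // offset inside that page
//       return pages[pageIndex][indexInPage];              // i.e. pages_[pageIndex][itemIndex % itemsPerPage]
//    }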
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
-    * - Array value:
-    *   - if empty, then print [] without indent or line break
-    *   - if the array contains no object value and no non-empty array, and all the
-    *     values fit on one line, then print the array on a single line.
-    *   - otherwise, if the values do not fit on one line, or the array contains
-    *     an object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments, they are output according to their #CommentPlacement.
-    *
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledWriter: public Writer
-   {
-   public:
-      StyledWriter();
-      virtual ~StyledWriter(){}
-
-   public: // overridden from Writer
-      /** \brief Serialize a Value in JSON format.
-       * \param root Value to serialize.
-       * \return String containing the JSON document that represents the root value.
-       */
-      virtual std::string write( const Value &root );
-
-   private:
-      void writeValue( const Value &value );
-      void writeArrayValue( const Value &value );
-      bool isMultineArray( const Value &value );
-      void pushValue( const std::string &value );
-      void writeIndent();
-      void writeWithIndent( const std::string &value );
-      void indent();
-      void unindent();
-      void writeCommentBeforeValue( const Value &root );
-      void writeCommentAfterValueOnSameLine( const Value &root );
-      bool hasCommentForValue( const Value &value );
-      static std::string normalizeEOL( const std::string &text );
-
-      typedef std::vector<std::string> ChildValues;
-
-      ChildValues childValues_;
-      std::string document_;
-      std::string indentString_;
-      int rightMargin_;
-      int indentSize_;
-      bool addChildValues_;
-   };
-
-   /** \brief Writes a Value in JSON format in a human friendly way,
-    *  to a stream rather than to a string.
-    *
-    * The rules for line breaks and indentation are as follows:
-    * - Object value:
-    *   - if empty, then print {} without indent or line break
-    *   - if not empty, then print '{', line break and indent, print one value per line,
-    *     then unindent, line break and print '}'.
-    * - Array value:
-    *   - if empty, then print [] without indent or line break
-    *   - if the array contains no object value and no non-empty array, and all the
-    *     values fit on one line, then print the array on a single line.
-    *   - otherwise, if the values do not fit on one line, or the array contains
-    *     an object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments, they are output according to their #CommentPlacement.
-    *
-    * \param indentation Each level will be indented by this amount extra.
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledStreamWriter
-   {
-   public:
-      StyledStreamWriter( std::string indentation="\t" );
-      ~StyledStreamWriter(){}
-
-   public:
-      /** \brief Serialize a Value in JSON format.
-       * \param out Stream to write to. (Can be an ostringstream, e.g.)
-       * \param root Value to serialize.
-       * \note There is no point in deriving from Writer, since write() should not return a value.
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32
-  {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32
-  {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32
-  {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32
- EndGlobalSection
- GlobalSection(ExtensibilityGlobals) = postSolution
- EndGlobalSection
- GlobalSection(ExtensibilityAddIns) = postSolution
- EndGlobalSection
-EndGlobal
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj
deleted file mode 100644
index 99a4dd6..0000000
--- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj
+++ /dev/null
@@ -1,119 +0,0 @@
[vcproj XML content not preserved in this export: the markup was stripped, leaving only blank removed lines.]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj
deleted file mode 100644
index 2d7bf99..0000000
--- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj
+++ /dev/null
@@ -1,214 +0,0 @@
[vcproj XML content not preserved in this export: the markup was stripped, leaving only blank removed lines.]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj
deleted file mode 100644
index df36700..0000000
--- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj
+++ /dev/null
@@ -1,130 +0,0 @@
[vcproj XML content not preserved in this export: the markup was stripped, leaving only blank removed lines.]
diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py
deleted file mode 100644
index fe0014f..0000000
--- a/tags/jsoncpp/0.5.0/makerelease.py
+++ /dev/null
@@ -1,359 +0,0 @@
-"""Tag the sandbox for release, make source and doc tarballs.
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' 
- svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Tarball uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - #@todo: - #upload source & doc tarballs - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
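// freeHead_ is an intrusive free list threaded through released slots: the
// first pointer-sized bytes of each freed object hold the address of the next
// free slot (which is why the constructor asserts that an allocation unit is
// at least as large as a pointer). Popping the head recycles a slot without
// going back to malloc.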
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
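// Constructing this namespace-scope dummy forces a call to arrayAllocator()
// during static initialization, so its function-local statics (the default
// allocator and the pointer to it) exist before main() and before other
// static initializers that might build array values; the function-local
// static itself sidesteps the cross-translation-unit initialization-order
// problem.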
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
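// Pages are raw memory from allocateArrayPage() and the elements were created
// with placement new, so every constructed Value must be destroyed explicitly
// here before the pages and the page index are handed back to the allocator.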
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
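// Each link in a bucket's chain holds up to itemPerLink slots, filled from
// the front. Reaching an "available" slot means the key is not present and
// that slot can be claimed; otherwise the key is compared against each used
// slot before following next_ to the next link in the chain.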
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
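The codePointToUTF8 helper above can be spot-checked by redoing its bit arithmetic for a known code point. A minimal, self-contained sketch in plain standard C++ (it repeats the three-byte branch rather than calling the file-static helper itself):

   #include <cassert>

   int main()
   {
      unsigned int cp = 0x20AC;                         // U+20AC, the euro sign
      unsigned char byte2 = 0x80 | (0x3F & cp);         // 0xAC
      unsigned char byte1 = 0x80 | (0x3F & (cp >> 6));  // 0x82
      unsigned char byte0 = 0xE0 | (0x0F & (cp >> 12)); // 0xE2
      assert( byte0 == 0xE2 && byte1 == 0x82 && byte2 == 0xAC ); // UTF-8: E2 82 AC
      return 0;
   }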
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
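// Overflow guard: threshold is the largest magnitude that can still be
// multiplied by 10 safely (|minInt| for negative literals, maxUInt otherwise,
// each divided by 10). Once the running value reaches it, the token is handed
// to decodeDouble() instead of risking integer overflow.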
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
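The valueAllocator() accessor above (like its array and map counterparts in the .inl files) follows the "construct on first use" idiom: a function-local static supplies the default allocator, and a namespace-scope dummy object forces the first call during static initialization. A stripped-down sketch of the same idiom, with purely illustrative names (Widget, widgetSingleton and ForceWidgetInit are not part of jsoncpp):

   #include <cstdio>

   struct Widget
   {
      Widget() { std::puts( "default widget constructed" ); }
   };

   // Function-local statics are created on first call, so the accessor is
   // safe to use from any other static initializer, regardless of link order.
   static Widget *&widgetSingleton()
   {
      static Widget defaultWidget;
      static Widget *current = &defaultWidget;
      return current;   // returning a reference lets callers swap in another instance
   }

   // Namespace-scope dummy whose constructor forces the first call before
   // main(), mirroring DummyValueAllocatorInitializer above.
   static struct ForceWidgetInit
   {
      ForceWidgetInit() { widgetSingleton(); }
   } forceWidgetInit;

   int main()
   {
      return widgetSingleton() ? 0 : 1;
   }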
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investigate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases will be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion fails. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure.
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targeted at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque<Failure> Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs tests as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test cases in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template <typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true.
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two strings are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include <json/json.h> -#include "jsontest.h" - - -// TODO: -// - boolean values return that they are integral. Should not be. -// - unsigned integers in integer range are not considered to be valid integers. Should check range.
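// A minimal usage sketch of the JSONTEST_* macros declared in jsontest.h above,
// assuming <json/json.h> and "jsontest.h" are included as in this file. The
// fixture "DemoTest", its member two_ and the test name "twoIsInt" are
// hypothetical; the real fixtures (ValueTest) and registrations follow below.
struct DemoTest : JsonTest::TestCase
{
   Json::Value two_;
   DemoTest() : two_( 2 ) {}
};

// JSONTEST_FIXTURE declares class TestDemoTesttwoIsInt deriving from DemoTest
// and opens the body of its runTestCase().
JSONTEST_FIXTURE( DemoTest, twoIsInt )
{
   JSONTEST_ASSERT( two_.isInt() ) << "two_ should hold an int";
   JSONTEST_ASSERT_EQUAL( 2, two_.asInt() );
}

int main( int argc, const char *argv[] )
{
   JsonTest::Runner runner;
   // Registers the factory generated by JSONTEST_FIXTURE above.
   JSONTEST_REGISTER_FIXTURE( runner, DemoTest, twoIsInt );
   // Honours --list-tests, --test TESTNAME and --test-auto, as documented
   // in Runner::printUsage().
   return runner.runCommandLine( argc, argv );
}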
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 91d35a6b15bab635a8eb536f34fa46fa0a18d0e9 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 25 Feb 2010 07:55:28 +0000 Subject: [PATCH 131/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@131 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 359 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + .../0.5.0/test/data/test_array_06.expected | 5 + 
.../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + 
.../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 172 files changed, 14512 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 
tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 
100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. 
+It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. 
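The flattened representation described under "Adding a reader/writer test" can be sketched in a few lines of Python. The sketch below only illustrates the path/value convention (root '.', object members joined with '.', array elements addressed as [N], containers printed as {} or []); the function name is made up, and the exact scalar formatting of the real jsontest output may differ.

def flatten( value, path = '.' ):
    # Flatten an already-parsed JSON document (nested dicts/lists/scalars)
    # into the path=value lines used by the .expected files.
    lines = []
    if isinstance( value, dict ):
        lines.append( '%s={}' % path )
        for name in sorted( value ):
            child = path + name if path.endswith('.') else path + '.' + name
            lines.extend( flatten( value[name], child ) )
    elif isinstance( value, list ):
        lines.append( '%s=[]' % path )
        for index, item in enumerate( value ):
            lines.extend( flatten( item, '%s[%d]' % (path, index) ) )
    else:
        lines.append( '%s=%s' % (path, value) )
    return lines

# Hypothetical example: flatten( {'count': 1, 'items': [10, 20]} ) yields
#   .={}
#   .count=1
#   .items=[]
#   .items[0]=10
#   .items[1]=20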
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + 
RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. 
+##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
new file mode 100644
index 0000000..fc7b530
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox
@@ -0,0 +1,97 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, ordered sequences of values, and
+collections of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- rewrite a JSON document, preserving the original comments
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+   // Report to the user the failure and its location in the document.
+   std::cout << "Failed to parse configuration\n"
+             << reader.getFormatedErrorMessages();
+   return;
+}
+
+// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins', return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )   // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, to make the new configuration document:
+// Since Json::Value has implicit constructors for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
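The \mainpage example in doc/jsoncpp.dox above re-emits the configuration with Json::StyledWriter. For compact, machine-oriented output the library also declares a FastWriter class (see the forward declarations in include/json/forwards.h later in this patch). The snippet below is only an illustrative sketch, not part of the patch itself; it assumes FastWriter exposes the same write( root ) interface as the StyledWriter call shown above, which is not visible in this excerpt.

   #include <json/json.h>
   #include <iostream>
   #include <string>

   // Sketch only: assumes Json::FastWriter::write( const Json::Value & ) returns a
   // compact, single-line std::string, mirroring StyledWriter::write used above.
   int main()
   {
      std::string config_doc = "{ \"encoding\" : \"UTF-8\" }";
      Json::Value root;
      Json::Reader reader;
      if ( !reader.parse( config_doc, root ) )
         return 1;                              // parse failure
      Json::FastWriter writer;
      std::cout << writer.write( root );        // compact output, e.g. {"encoding":"UTF-8"}
      return 0;
   }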
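Note also that the doxyfile shown earlier is a template rather than a finished configuration: tokens such as %JSONCPP_VERSION%, %HTML_OUTPUT%, %HTML_HELP%, %HAVE_DOT%, %UML_LOOK%, %DOT_PATH% and %WARNING_LOG_PATH% are placeholders that the doxybuild.py script added below rewrites (see do_subst_in_file() and the subst_keys table in build_doc()) before invoking doxygen. The following is a simplified sketch of that substitution step; the file names come from the script, but the values are illustrative rather than the exact ones build_doc() computes.

    import re

    # Illustrative values only; build_doc() in doxybuild.py computes the real ones.
    subst_keys = {
        '%JSONCPP_VERSION%': '0.5.0',
        '%HTML_OUTPUT%': '../dist/doxygen/jsoncpp-api-html-0.5.0',
        '%HAVE_DOT%': 'NO',
        '%WARNING_LOG_PATH%': '../dist/jsoncpp-doxygen-warning.log',
    }

    doxyfile = open('doc/doxyfile.in', 'r').read()
    for token, value in subst_keys.items():
        # Backslashes are doubled so re.sub() does not treat them as escapes,
        # mirroring what do_subst_in_file() does in the script below.
        doxyfile = re.sub(token, value.replace('\\', '\\\\'), doxyfile)
    open('doc/doxyfile', 'w').write(doxyfile)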
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+    - Build system clean-up:
+      - Fix build on Windows (shared-library build is broken)
+      - Add enable/disable flag for static and shared library build
+      - Enhance help
+    - Platform portability check: (Notes: was ok on last check)
+      - linux/gcc,
+      - solaris/cc,
+      - windows/msvc678,
+      - aix/vacpp
+    - Add JsonCpp version to header as numeric for use in preprocessor test
+    - Remove buggy experimental hash stuff
+    - Release on sourceforge download
+  \section ms_strict Add a strict mode to reader/parser
+    Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+    - Enforce only object or array as root element
+    - Disable comment support
+    - Get jsonchecker failing tests to pass in strict mode
+  \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value.
+    Some typical use cases involve converting an application-specific structure to/from a JSON document.
+    - Event-based parser to allow deserializing a JSON document directly into a data structure instead of
+      using the intermediate Json::Value.
+    - "Stream"-based writer to serialize a JSON document without using Json::Value as input.
+    - Performance oriented parser/writer:
+      - Provides an event-based parser. Should allow pulling & skipping events for ease of use.
+      - Provides a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+    - Provide support for static property name definition, avoiding allocation
+    - Static property dictionary can be provided to the JSON reader
+    - Performance scenario & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..0a2a6c7
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,167 @@
+"""Script to generate doxygen documentation.
+"""
+
+import re
+import os
+import os.path
+import sys
+import shutil
+from devtools import tarball
+
+def find_program(*filenames):
+    """Find a program by searching each folder listed in the PATH environment variable.
+    @param filenames: a list of possible names of the program to search for
+    @return: the full path of the filename if found, or '' if filename could not be found
+"""
+    paths = os.environ.get('PATH', '').split(os.pathsep)
+    suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
+    for filename in filenames:
+        for name in [filename+ext for ext in suffixes.split() or ['']]:  # fall back to the bare name when there are no platform suffixes
+            for directory in paths:
+                full_path = os.path.join(directory, name)
+                if os.path.isfile(full_path):
+                    return full_path
+    return ''
+
+def do_subst_in_file(targetfile, sourcefile, dict):
+    """Replace all instances of the keys of dict with their values.
+    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
+    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
+ """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' ) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. 
+ """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..d575b70 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
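A minimal sketch of driving the Reader declared above together with the Features class from features.h earlier in this patch; the JSON text, member name and error handling are illustrative rather than taken from the library's own tests.

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main()
    {
        const std::string doc = "{ \"name\": \"jsoncpp\", \"release\": true }";
        // strictMode(): comments forbidden, root must be an array or an object.
        Json::Reader reader( Json::Features::strictMode() );
        Json::Value root;
        if ( !reader.parse( doc, root, false ) )      // false: do not collect comments
        {
            std::cerr << reader.getFormatedErrorMessages();
            return 1;
        }
        std::cout << root["name"].asString() << std::endl;
        return 0;
    }

Constructing the Reader with Features::all() (or the default constructor) restores the lenient behaviour: comments are accepted and any JSON value may serve as the root.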
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
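A short sketch of the on-demand behaviour described above; every helper used (operator[], append(), get()) is declared in the class that follows, and the member names are invented for illustration.

    #include <json/value.h>

    Json::Value buildExample()
    {
        Json::Value root( Json::objectValue );
        root["name"] = "jsoncpp";                             // creates a string member
        root["version"] = 5;                                  // creates an int member
        root["tags"] = Json::Value( Json::arrayValue );
        root["tags"].append( "json" );                        // arrays grow as needed
        root["tags"][1u] = "parser";                          // 1u selects the array overload of operator[]
        Json::Value owner = root.get( "owner", "unknown" );   // default value; the member stays absent
        return root;
    }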
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
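As the constructor documentation above notes, clear() and resize() do not alter the stored type; a brief sketch using the array helpers declared a few lines further down (the element values are arbitrary).

    #include <json/value.h>

    void arrayExample()
    {
        Json::Value arr( Json::arrayValue );
        arr.resize( 3 );          // three null elements
        arr[0u] = 1;
        arr[1u] = 2;
        arr[2u] = 3;
        arr.clear();              // empty again, but still an arrayValue
        arr.append( 42 );         // size() == 1
    }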
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
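A sketch of the conversion and lookup helpers declared above; isMember(), getMemberNames() and toStyledString() appear a few lines further down, and the member names are invented.

    #include <json/value.h>
    #include <iostream>

    void printMembers( const Json::Value &config )
    {
        if ( config.isMember( "encoding" ) )
            std::cout << "encoding: " << config["encoding"].asString() << "\n";
        int indent = config.get( "indent", 3 ).asInt();         // default when the member is absent
        Json::Value::Members names = config.getMemberNames();
        for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
            std::cout << *it << " = " << config[ *it ].toStyledString();
        (void)indent;
    }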
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
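The Path class above is documented as experimental and untested; the sketch below only restates its documented syntax, and the document layout (".settings.…") is invented.

    #include <json/value.h>

    void pathExample()
    {
        Json::Value root;
        root["settings"]["indent"] = 4;
        Json::Path indentPath( ".settings.indent" );
        const Json::Value &indent = indentPath.resolve( root );            // existing node
        Json::Value eol = Json::Path( ".settings.%", Json::PathArgument( "eol" ) )
                              .resolve( root, "\n" );                      // default when absent
        Json::Path( ".settings.width" ).make( root ) = 120;                // make() creates the node
        (void)indent; (void)eol;
    }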
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
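A stand-alone restatement of the page look-up arithmetic described above; itemsPerPage mirrors the enum just above and the index value is arbitrary.

    // Page look-up used by ValueInternalArray: page index, then slot within the page.
    unsigned int pageOf( unsigned int itemIndex )
    {
        const unsigned int itemsPerPage = 8;     // matches ValueInternalArray::itemsPerPage
        return itemIndex / itemsPerPage;         // e.g. item 21 -> page 2, slot 21 % 8 == 5
    }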
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. + * \param indexes [input] pointer on the current index. May be \c NULL. 
+ * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief Experimental and untested: base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief Experimental and untested: const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
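A sketch of walking a Value with the const iterator declared here; memberName() comes from ValueIteratorBase above, begin()/end() from Value, and the printing format is illustrative.

    #include <json/json.h>
    #include <iostream>

    void dump( const Json::Value &root )
    {
        for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
        {
            if ( root.isObject() )
                std::cout << it.memberName() << ": ";
            std::cout << (*it).toStyledString();
        }
    }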
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Experimental and untested: iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
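A sketch combining the writers declared in this header: FastWriter emits a compact single line, StyledWriter an indented string, and StyledStreamWriter (whose write() is declared just below) targets a stream directly. The value written here is illustrative.

    #include <json/json.h>
    #include <iostream>
    #include <string>

    void writeExample()
    {
        Json::Value root;
        root["name"] = "jsoncpp";
        root["release"] = true;

        Json::FastWriter fast;
        std::string compact = fast.write( root );     // single line, machine oriented

        Json::StyledWriter styled;
        std::string pretty = styled.write( root );    // indented, comments written back

        Json::StyledStreamWriter writer( "  " );      // two-space indentation
        writer.write( std::cout, root );

        std::cout << compact << pretty;
    }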
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..f6826c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,359 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' 
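+ # The release tag was created earlier in this run; remove it so a failed
+ # distcheck does not leave a broken tag in the repository.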
+ svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Tarball uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + #@todo: + #upload source & doc tarballs + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
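+ The expanded dictionary is wrapped in a SCons Value node and registered as
+ a dependency of the target, so the target is rebuilt whenever a substituted
+ value changes even if the template source file is unchanged.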
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
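+ # Illustrative use from a SConscript (sketch only; the actual build files
+ # may wire the tool up differently):
+ #   env = Environment( tools = ['targz'], toolpath = ['scons-tools'] )
+ #   env['TARGZ_BASEDIR'] = env.Dir('dist')
+ #   env.TarGz( 'dist/jsoncpp-src.tar.gz', [ env.Dir('dist/export') ] )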
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
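+ // Released objects are threaded into a singly linked free list: the first
+ // bytes of a freed object hold the pointer to the next free object, which
+ // is why the constructor asserts that an allocation is at least as large
+ // as a pointer.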
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
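+ // The dummy namespace-scope object forces the function-local statics in
+ // arrayAllocator() to be constructed during static initialization, so
+ // Values created by other static initializers already find a valid
+ // allocator.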
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
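+ // Items were constructed with placement new on raw pages, so they are
+ // destroyed with explicit destructor calls before the pages are released.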
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
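+ * Each link stores up to itemPerLink key/value pairs for one hash bucket and
+ * chains to further links through next_/previous_, forming the per-bucket
+ * overflow list used by ValueInternalMap.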
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
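+ // Walk the bucket's chain of links: the first slot still marked as
+ // "available" means the key is absent and a new item is created there;
+ // otherwise the stored key is compared with strcmp().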
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
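+// Typical use of Reader (illustrative sketch, not part of this translation
+// unit):
+//
+//   Json::Reader reader;
+//   Json::Value root;
+//   if ( !reader.parse( document, root ) )
+//      std::cerr << reader.getFormatedErrorMessages();
+//
+// parse() resets the reader state, pushes the root value on an internal node
+// stack and lets readValue() dispatch on the token type; when comment
+// collection is enabled, comments are attached to the values they precede or
+// follow.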
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
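// (Editor's note, not in the original source) The divide-by-ten threshold computed here
// is an overflow guard: once the accumulated magnitude reaches it, appending another
// digit could overflow, so decodeNumber() falls back to decodeDouble() below.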
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
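// [Editor's note -- not part of the original patch] When JSON_VALUE_USE_INTERNAL_MAP is
// defined, arrays and objects are stored in the custom ValueInternalArray /
// ValueInternalMap containers pulled in just below; otherwise they live in the
// ObjectValues container declared in the public headers. This is why most Value
// methods in this file branch on the same #ifdef.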
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
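// (Editor's note, not part of the original source) In the default build this grows the
// array by touching element newSize - 1 through operator[] and shrinks it by erasing
// every index >= newSize from the underlying map; with JSON_VALUE_USE_INTERNAL_MAP it
// simply forwards to the internal array's resize().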
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
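// (Editor's note, not part of the original patch) The error branches above are
// placeholders only; this overload keeps walking and returns whatever node it lands on.
// Callers needing a fallback can use the overload defined just below, e.g. (path and
// default value illustrative): Json::Path( ".settings.encoding" ).resolve( root, Json::Value( "UTF-8" ) ).
// Note also that the isValidIndex() condition in both overloads looks inverted: it
// treats a *valid* index as the error case.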
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
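+      // (failures_.back() is the Failure entry that addFailureInfo() just
+      // appended for this PredicateContext.)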
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
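+      /// \param file Source file in which the assertion failed.
+      /// \param line Line of the failed assertion within \c file.
+      /// \param expr Textual form of the asserted expression, or 0 if not available.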
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
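+/// The assertion is non-aborting: on failure the expression text is recorded
+/// in the current TestResult and execution of the test case continues; an
+/// explanatory message may be appended with operator <<, as in the examples below.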
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
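The test_string_unicode_* cases a few entries up exercise \u escapes, including the surrogate pair \uD834\uDD1E, which a conforming reader has to combine into the single code point U+1D11E (MUSICAL SYMBOL G CLEF) before emitting UTF-8. A minimal Python 3 illustration of that rule, using the standard json module rather than JsonCpp:

    import json

    text = '"\\uD834\\uDD1E"'        # the raw JSON document, escapes intact
    value = json.loads(text)         # surrogate pair collapses to one code point
    assert len(value) == 1 and ord(value) == 0x1D11E
    print(value.encode('utf-8'))     # b'\xf0\x9d\x84\x9e', the bytes in the .expected file
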
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
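For reference, the path=value lines in the *.expected files follow the convention implemented by valueTreeToString() above: '.' names the root, object members append '.name', array elements append '[index]', and reals are printed with '%.16g'. A compact Python 3 sketch of the same flattening, illustrative only (the shipped runners are the Python 2 script above and the C++ jsontest executable):

    import json

    def flatten(value, path='.'):
        # Mirrors valueTreeToString(): one line per node, members sorted by name.
        if isinstance(value, dict):
            yield '%s={}' % path
            prefix = path if path.endswith('.') else path + '.'
            for name in sorted(value):
                yield from flatten(value[name], prefix + name)
        elif isinstance(value, list):
            yield '%s=[]' % path
            for index, child in enumerate(value):
                yield from flatten(child, '%s[%d]' % (path, index))
        elif isinstance(value, bool):        # bool before int: True is an int in Python
            yield '%s=%s' % (path, 'true' if value else 'false')
        elif isinstance(value, str):
            yield '%s="%s"' % (path, value)
        elif isinstance(value, int):
            yield '%s=%d' % (path, value)
        elif isinstance(value, float):
            yield '%s=%.16g' % (path, value)
        elif value is None:
            yield '%s=null' % path

    print('\n'.join(flatten(json.loads('{"count": 1, "pi": 3.14}'))))
    # .={}
    # .count=1
    # .pi=3.14
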
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 7035f6a75b001a0713483460aea02e27c9a88152 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 25 Feb 2010 08:30:09 +0000 Subject: [PATCH 132/268] - added --no-web to skip upload to web site - added automatic upload of source and documentation tarball on frs.sourceforge.net git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@132 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/makerelease.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index 80a2edb..659c2d9 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -227,6 +227,13 @@ def sourceforge_web_synchro( sourceforge_project, doc_dir, mput %s exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + def main(): usage = """%prog release_version next_dev_version @@ -254,8 +261,10 @@ def main(): help="""Overwrite release existing tag if it exist. [Default: %default]""") parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store', default=False, + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") parser.add_option('-u', '--upload-user', dest="user", action='store', help="""Sourceforge user for SFTP documentation upload.""") parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), @@ -329,11 +338,17 @@ def main(): svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) sys.exit(1) if options.user: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentatio upload' + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' else: - print 'No upload user specified. Documentation was not upload.' + print 'No upload user specified. Web site and download tarbal were not uploaded.' 
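The new sourceforge_release_tarball() above drives the run_sftp_batch() helper defined earlier in makerelease.py (outside this excerpt), logging in to frs.sourceforge.net as "user,project" and feeding the sftp client an mput/exit batch script. A rough stand-alone sketch of the same idea with OpenSSH sftp; the example path and the reliance on pre-configured SSH keys are assumptions for illustration:

    import subprocess

    def sftp_upload(user, project, paths, sftp='sftp'):
        # frs.sourceforge.net expects "user,project" as the login name.
        userhost = '%s,%s@frs.sourceforge.net' % (user, project)
        # One 'put' per file; '-b -' tells OpenSSH sftp to read the batch from stdin.
        batch = ''.join('put %s\n' % path for path in paths) + 'exit\n'
        subprocess.run([sftp, '-b', '-', userhost], input=batch, text=True, check=True)

    # e.g. sftp_upload('someuser', 'jsoncpp', ['dist/jsoncpp-src-0.5.0.tar.gz'])
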
print 'Tarball can be found at:', doc_tarball_path #@todo: #upload source & doc tarballs From 2bc46baa1928e272490561fd218e29ceeff4c4fc Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 11 Mar 2010 20:23:07 +0000 Subject: [PATCH 133/268] Removed experimental notification on iterators, and added experimental status for allocator (to be removed) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@133 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/LICENSE | 1 + trunk/jsoncpp/include/json/value.h | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 trunk/jsoncpp/LICENSE diff --git a/trunk/jsoncpp/LICENSE b/trunk/jsoncpp/LICENSE new file mode 100644 index 0000000..d20fb29 --- /dev/null +++ b/trunk/jsoncpp/LICENSE @@ -0,0 +1 @@ +The json-cpp library and this documentation are in Public Domain. diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index d575b70..58bfd88 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -513,7 +513,7 @@ namespace Json { Args args_; }; - /** \brief Allocator to customize member name and string value memory management done by Value. + /** \brief Experimental do not use: Allocator to customize member name and string value memory management done by Value. * * - makeMemberName() and releaseMemberName() are called to respectively duplicate and * free an Json::objectValue member name. @@ -785,7 +785,7 @@ namespace Json { PageIndex pageCount_; }; - /** \brief Allocator to customize Value internal array. + /** \brief Experimental: do not use. Allocator to customize Value internal array. * Below is an example of a simple implementation (actual implementation use * memory pool). \code @@ -873,7 +873,7 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator #endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Experimental and untested: base class for Value iterators. + /** \brief base class for Value iterators. * */ class ValueIteratorBase @@ -943,7 +943,7 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator #endif }; - /** \brief Experimental and untested: const iterator for object and array value. + /** \brief const iterator for object and array value. * */ class ValueConstIterator : public ValueIteratorBase @@ -1002,7 +1002,7 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator }; - /** \brief Experimental and untested: iterator for object and array value. + /** \brief Iterator for object and array value. */ class ValueIterator : public ValueIteratorBase { From 3526585c676cdf25e7f864192a83e2779a8e0bb3 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 11 Mar 2010 21:02:26 +0000 Subject: [PATCH 134/268] - fixed typos and added "download" section to documentation - commit version numbers after release git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@134 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/jsoncpp.dox | 29 ++++++++++++++++++++++++----- trunk/jsoncpp/makerelease.py | 17 +++++++++++++---- 2 files changed, 37 insertions(+), 9 deletions(-) diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index fc7b530..abaac6c 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -4,7 +4,7 @@ JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represents integer, real number, string, an ordered sequence of value, and +It can represent integer, real number, string, an ordered sequence of value, and a collection of name/value pairs. 
Here is an example of JSON data: @@ -28,8 +28,16 @@ Here is an example of JSON data: \section _features Features - read and write JSON document +- attach C and C++ style comments to element during parsing - rewrite JSON document preserving original comments +Notes: Comments used to be supported in JSON but where removed for +portability (C like comments are not supported in Python). Since +comments are useful in configuration/input file, this feature was +preserved. + +\section _example Code example + \code Json::Value root; // will contains the root value after parsing. Json::Reader reader; @@ -57,7 +65,7 @@ setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); // ... // At application shutdown to make the new configuration document: // Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitely construct the Json::Value object: +// necessary to explicitly construct the Json::Value object: root["encoding"] = getCurrentEncoding(); root["indent"]["length"] = getCurrentIndentLength(); root["indent"]["use_space"] = getCurrentIndentUseSpace(); @@ -75,11 +83,22 @@ std::cout << root; \endcode \section _plinks Build instructions -The build instruction are located in the file +The build instructions are located in the file README.txt in the top-directory of the project. -Permanent link to the lastest revision of the file in subversion: -lastest README.txt +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To checkout the source, see the following +instructions. \section _plinks Project links - json-cpp home diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index 659c2d9..b760fae 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -58,7 +58,7 @@ def check_no_pending_commit(): for entry in etree.getiterator( 'entry' ): path = entry.get('path') status = entry.find('wc-status').get('item') - if status != 'unversioned': + if status != 'unversioned' and path != 'version': msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) if msg: msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) @@ -84,6 +84,11 @@ def svn_check_if_tag_exist( tag_url ): return False return True +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + def svn_tag_sandbox( tag_url, message ): """Makes a tag based on the sandbox revisions. """ @@ -272,9 +277,10 @@ def main(): parser.enable_interspersed_args() options, args = parser.parse_args() - if len(args) < 1: + if len(args) != 2: parser.error( 'release_version missing on command-line.' ) release_version = args[0] + next_version = args[1] if not options.platforms and not options.no_test: parser.error( 'You must specify either --platform or --no-test option.' ) @@ -286,6 +292,7 @@ def main(): if not msg: print 'Setting version to', release_version set_version( release_version ) + svn_commit( 'Release ' + release_version ) tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) if svn_check_if_tag_exist( tag_url ): if options.retag_release: @@ -350,8 +357,10 @@ def main(): else: print 'No upload user specified. Web site and download tarbal were not uploaded.' 
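Together with the lines that follow, this change turns the release into a two-argument flow: write the release number into the top-level version file, commit, tag and test, then move trunk on to the next development version and commit again. A condensed Python sketch of that sequence (svn_run stands in for the script's svn_command helper, which is defined outside this excerpt; the assumption that set_version() rewrites the version file follows from the check_no_pending_commit() change above):

    import subprocess

    def svn_run(*args):
        subprocess.run(('svn',) + args, check=True)

    def release_flow(release_version, next_version):
        open('version', 'w').write(release_version)          # set_version(release_version)
        svn_run('ci', '-m', 'Release ' + release_version)    # commit before tagging
        # ... tag the sandbox, run the build/test/dist steps, upload ...
        open('version', 'w').write(next_version)             # move trunk to the next dev version
        svn_run('ci', '-m', 'Released ' + release_version)
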
print 'Tarball can be found at:', doc_tarball_path - #@todo: - #upload source & doc tarballs + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) else: sys.stderr.write( msg + '\n' ) From 176e4e1c4e6d8c0a76fd578b7e8be2c9f5cccff5 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 11 Mar 2010 21:05:18 +0000 Subject: [PATCH 135/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@135 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 97 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 167 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 359 --- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - .../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 
- .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - 
.../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 172 files changed, 14512 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 
100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py 
delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. 
- -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. 
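For illustration, a hypothetical test pair following the flattening convention described in the deleted README above (the file name and values here are invented, not taken from the actual test suite) might look like:

  test_example_01.json:
    { "count": 1234, "sizes": [ 1, 2 ] }

  test_example_01.expected (one element per line, path and value separated by '='):
    .={}
    .count=1234
    .sizes=[]
    .sizes[0]=1
    .sizes[1]=2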
- -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
- # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - 
RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. 
-##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
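A brief usage sketch for the two helpers defined in the deleted devtools/tarball.py above; the archive name and paths are hypothetical, and the prefix_dir value only illustrates the re-rooting behaviour documented in make_tarball's docstring:

  from devtools import tarball

  # Pack selected sources into an archive; entries are stored relative to
  # base_dir and re-rooted under the jsoncpp-src-0.5.0/ prefix.
  tarball.make_tarball('dist/jsoncpp-src-0.5.0.tar.gz',
                       ['AUTHORS', 'README.txt', 'include', 'src'],
                       base_dir='.',
                       prefix_dir='jsoncpp-src-0.5.0')

  # Unpack the same gzipped tarball into a scratch directory.
  tarball.decompress('dist/jsoncpp-src-0.5.0.tar.gz', 'build/unpacked')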
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
-[footer.html content: SourceForge logo image; "hosts this site."; "Send comments to: Json-cpp Developers"]
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@
-[header.html content: page title "JsonCpp - JSON data format manipulation library"; links: "JsonCpp project page", "JsonCpp home page"]
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index fc7b530..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,97 +0,0 @@
-/**
-\mainpage
-\section _intro Introduction
-
-JSON (JavaScript Object Notation) is a lightweight data-interchange format.
-It can represent integers, real numbers, strings, an ordered sequence of values, and
-a collection of name/value pairs.
-
-Here is an example of JSON data:
-\verbatim
-// Configuration options
-{
-   // Default encoding for text
-   "encoding" : "UTF-8",
-
-   // Plug-ins loaded at start-up
-   "plug-ins" : [
-      "python",
-      "c++",
-      "ruby"
-      ],
-
-   // Tab indent size
-   "indent" : { "length" : 3, "use_space" : true }
-}
-\endverbatim
-
-\section _features Features
-- read and write JSON document
-- rewrite JSON document preserving original comments
-
-\code
-Json::Value root;   // will contain the root value after parsing.
-Json::Reader reader;
-bool parsingSuccessful = reader.parse( config_doc, root );
-if ( !parsingSuccessful )
-{
-    // report to the user the failure and its location in the document.
-    std::cout << "Failed to parse configuration\n"
-              << reader.getFormatedErrorMessages();
-    return;
-}
-
-// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
-// such member.
-std::string encoding = root.get("encoding", "UTF-8" ).asString();
-// Get the value of the member of root named 'plug-ins', return a 'null' value if
-// there is no such member.
-const Json::Value plugins = root["plug-ins"];
-for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
-   loadPlugIn( plugins[index].asString() );
-
-setIndentLength( root["indent"].get("length", 3).asInt() );
-setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
-
-// ...
-// At application shutdown, to make the new configuration document:
-// Since Json::Value has implicit constructors for all value types, it is not
-// necessary to explicitly construct the Json::Value object:
-root["encoding"] = getCurrentEncoding();
-root["indent"]["length"] = getCurrentIndentLength();
-root["indent"]["use_space"] = getCurrentIndentUseSpace();
-
-Json::StyledWriter writer;
-// Make a new JSON document for the configuration. Preserve original comments.
-std::string outputConfig = writer.write( root );
-
-// You can also use streams. This will put the contents of any JSON
-// stream at a particular sub-value, if you'd like.
-std::cin >> root["subtree"];
-
-// And you can write to a stream, using the StyledWriter automatically.
-std::cout << root;
-\endcode
-
-\section _pbuild Build instructions
-The build instructions are located in the file
-README.txt in the top directory of the project.
-
-Permanent link to the latest revision of the file in subversion:
-latest README.txt
-
-\section _plinks Project links
-- json-cpp home
-- json-cpp sourceforge project
-
-\section _rlinks Related links
-- JSON Specification and alternate language implementations.
-- YAML A data format designed for human readability.
-- UTF-8 and Unicode FAQ.
-
-\section _license License
-The json-cpp library and this documentation are in Public Domain.
-
-\author Baptiste Lepilleur
-*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@
-The documentation is generated using doxygen (http://www.doxygen.org).
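Note: the jsoncpp.dox example above uses Json::StyledWriter; the forwards declared in this import also list Json::Reader and Json::FastWriter (writer.h). The snippet below is a minimal, self-contained sketch, not part of the imported sources, of a compact write/parse round-trip using only that declared API; the variable names (root, doc, parsed) are illustrative only.

#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   // Build a document programmatically; Json::Value has implicit
   // constructors for the basic value types.
   Json::Value root;
   root["encoding"] = "UTF-8";
   root["plug-ins"].append( "python" );
   root["indent"]["length"] = 3;

   // FastWriter emits a compact single-line document, in contrast to the
   // indented output of the StyledWriter shown in jsoncpp.dox above.
   Json::FastWriter writer;
   std::string doc = writer.write( root );

   // Round-trip: parse the compact document back into a Value.
   Json::Value parsed;
   Json::Reader reader;
   if ( !reader.parse( doc, parsed ) )
   {
      std::cout << reader.getFormatedErrorMessages();
      return 1;
   }
   std::cout << parsed["plug-ins"][0u].asString() << std::endl;  // prints "python"
   return 0;
}

Both writers take a Json::Value and return a std::string, so the choice between them only affects formatting, not content.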
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 0a2a6c7..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. 
- """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' ) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. 
- """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index d575b70..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
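The array accessors above and the object helpers that follow can be sketched as below (illustrative values; note the 0u literal needed so the compiler picks the array overload of operator[]):
\code
Json::Value arr( Json::arrayValue );
arr.append( "first" );                       // arr[0u]
arr[2u] = 3.14;                              // arr[1u] is created as null
arr.resize( 5 );                             // arr[3u] and arr[4u] are null
bool ok = arr.isValidIndex( 4 );             // true: 4 < arr.size()
double x = arr.get( 2, 0.0 ).asDouble();     // 3.14

Json::Value obj( Json::objectValue );
obj["id"] = 42;
bool present = obj.isMember( "id" );         // true
Json::Value old = obj.removeMember( "id" );  // old holds 42; obj no longer has "id"
\endcode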
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
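The Path syntax listed above can be used roughly as follows. This is a hedged sketch: the class is documented as experimental and untested, and the node names are invented for the example.
\code
Json::Value root;
root["settings"]["window"][0u]["width"] = 640;

// Fixed path.
Json::Path widthPath( ".settings.window[0].width" );
int w = widthPath.resolve( root, Json::Value(0) ).asInt();        // 640

// Parameterised path: '%' and '[%]' are filled from the extra arguments.
Json::Path memberPath( ".settings.%", Json::PathArgument( "window" ) );
const Json::Value &window = memberPath.resolve( root );
\endcode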
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
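A small worked instance of the page look-up arithmetic described above, with values chosen purely for illustration (not part of the class):
\code
// With itemsPerPage == 8, item 27 is found as:
unsigned int itemIndex   = 27;
unsigned int pageIndex   = itemIndex / 8;    // 3
unsigned int indexInPage = itemIndex % 8;    // 3
// i.e. the element lives at pages_[pageIndex][indexInPage]
\endcode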
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. - * \param indexes [input] pointer on the current index. May be \c NULL. 
- * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief Experimental and untested: base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief Experimental and untested: const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Experimental and untested: iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
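A short sketch of how the writers declared above are typically used, assuming <json/json.h>, <string> and <sstream> are available; the value contents are illustrative only:
\code
Json::Value root;
root["status"] = "ok";
root["count"]  = 2;

Json::FastWriter fast;                           // single-line output, machine oriented
std::string compact = fast.write( root );

Json::StyledWriter styled;                       // human-friendly, returns a string
std::string pretty = styled.write( root );

Json::StyledStreamWriter streamWriter( "  " );   // human-friendly, writes to a stream
std::ostringstream out;
streamWriter.write( out, root );
\endcode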
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index f6826c2..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,359 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. 
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' 
- svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Tarball uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - #@todo: - #upload source & doc tarballs - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
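[Editor's note] The comment block above describes the strategy behind the deleted json_batchallocator.h: objects are carved out of large malloc'ed pages, constructed with placement new, and recycled through an intrusive free list that reuses the dead object's own storage as the "next" pointer. The fragment below is a minimal, self-contained sketch of that pattern, not the library's template (whose parameters were stripped in this export); the Node type, page size and TinyPool name are illustrative only.

#include <cassert>
#include <cstdlib>
#include <new>
#include <vector>

// Node must be at least pointer-sized so the free-list link fits in the dead object,
// mirroring the assert in BatchAllocator's constructor.
struct Node { double a, b; };

class TinyPool {
public:
    explicit TinyPool(std::size_t perPage = 256)
        : perPage_(perPage), freeHead_(0), used_(perPage) {}
    ~TinyPool() { for (std::size_t i = 0; i < pages_.size(); ++i) std::free(pages_[i]); }

    Node *allocate() {
        if (freeHead_) {                       // pop a recycled slot from the free list
            Node *n = freeHead_;
            freeHead_ = *reinterpret_cast<Node **>(n);
            return n;
        }
        if (used_ == perPage_) {               // current page exhausted: grab a new one
            pages_.push_back(static_cast<Node *>(std::malloc(sizeof(Node) * perPage_)));
            used_ = 0;
        }
        return pages_.back() + used_++;
    }

    void release(Node *n) {                    // caller must have destroyed *n already
        assert(n != 0);
        *reinterpret_cast<Node **>(n) = freeHead_;   // reuse the storage as a link
        freeHead_ = n;
    }

private:
    std::size_t perPage_;
    Node *freeHead_;
    std::size_t used_;
    std::vector<Node *> pages_;
};

int main() {
    TinyPool pool;
    Node *a = new (pool.allocate()) Node();    // placement new, as the header requires
    a->~Node();
    pool.release(a);                           // storage is recycled, never freed per object
    return 0;
}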
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
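[Editor's note] reallocateArrayPageIndex above grows the page-index table geometrically ((count*3)/2 + 1) and throws std::bad_alloc when realloc fails, so repeated appends need only a logarithmic number of reallocations. A stand-alone sketch of that growth policy, with illustrative names:

#include <cstdlib>
#include <new>

// Grow a heap array of pointers to at least minCount slots using the same
// 1.5x-plus-one policy as the allocator above. Illustrative only.
template <typename T>
void growIndex(T **&slots, unsigned &count, unsigned minCount) {
    unsigned newCount = (count * 3) / 2 + 1;   // geometric growth
    if (minCount > newCount)
        newCount = minCount;                   // but never less than requested
    void *p = std::realloc(slots, sizeof(T *) * newCount);
    if (!p)
        throw std::bad_alloc();                // the old block stays valid on failure
    slots = static_cast<T **>(p);
    count = newCount;
}

int main() {
    int **pages = 0;
    unsigned capacity = 0;
    for (unsigned need = 1; need <= 100; need *= 2)
        if (need > capacity)
            growIndex(pages, capacity, need);  // only a handful of reallocations overall
    std::free(pages);
    return 0;
}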
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
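[Editor's note] ValueInternalArray never stores its elements contiguously: makeIterator() above splits a flat index into a page (index / itemsPerPage) and an offset inside that page (index % itemsPerPage), and increment() carries the offset over into the next page. A tiny arithmetic sketch of that addressing scheme, with an assumed page size:

#include <cassert>

int main() {
    const unsigned itemsPerPage = 8;          // assumed; the real constant lives in the header

    // Flat index 21 lands on page 2, slot 5.
    unsigned index  = 21;
    unsigned page   = index / itemsPerPage;   // 2
    unsigned offset = index % itemsPerPage;   // 5
    assert(page == 2 && offset == 5);

    // Stepping past the last slot of a page moves to slot 0 of the next page,
    // exactly what increment() does with currentItemIndex_ / currentPageIndex_.
    offset = itemsPerPage - 1;
    ++offset;
    if (offset == itemsPerPage) { offset = 0; ++page; }
    assert(page == 3 && offset == 0);
    return 0;
}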
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
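[Editor's note] The design comment here sketches a chained hash map: a bucket array whose entries chain fixed-size links of six key/value slots each, addressed by a very simple additive hash (further down in this file each byte of the key is weighted by 37 and summed) reduced modulo the bucket count. A small check of that hash, assuming ASCII input; the hashKey name is mine:

#include <cassert>

// Same rolling hash as ValueInternalMap::hash() below: sum of byte * 37.
static unsigned hashKey(const char *key) {
    unsigned h = 0;
    while (*key)
        h += *key++ * 37;
    return h;
}

int main() {
    // 'n'+'a'+'m'+'e' = 110+97+109+101 = 417, times 37 = 15429 (ASCII assumed).
    assert(hashKey("name") == 15429);

    // The map starts with a single bucket (reserve() allocates 1), so every key
    // chains off bucket 0 until the table grows.
    const unsigned bucketCount = 1;
    assert(hashKey("name") % bucketCount == 0);
    return 0;
}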
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
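[Editor's note] doActualRemove above never shifts elements: it swaps the removed slot with the last used slot of the bucket's tail link, marks that last slot available again, and frees the tail link only once it is completely empty. The same O(1) "swap with last, then shrink" idiom on a plain vector, for illustration only:

#include <algorithm>
#include <cassert>
#include <vector>

// Remove element i in O(1) by overwriting it with the last element;
// the order of the survivors is not preserved, the same trade-off the map makes.
static void swapRemove(std::vector<int> &v, std::size_t i) {
    assert(i < v.size());
    std::swap(v[i], v.back());
    v.pop_back();
}

int main() {
    std::vector<int> v;
    v.push_back(10); v.push_back(20); v.push_back(30); v.push_back(40);
    swapRemove(v, 1);                    // 20 goes away, 40 takes its slot
    assert(v.size() == 3 && v[1] == 40);
    return 0;
}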
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
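[Editor's note] codePointToUTF8 above follows the standard UTF-8 layout: one byte up to U+007F, two up to U+07FF, three up to U+FFFF and four up to U+10FFFF, with each continuation byte carrying six payload bits behind a 0x80 marker. A worked check for U+20AC (the Euro sign), which must encode to E2 82 AC:

#include <cassert>

int main() {
    unsigned cp = 0x20AC;                           // three-byte range (0x0800..0xFFFF)
    unsigned char b0 = 0xE0 | ((cp >> 12) & 0x0F);  // 0xE2
    unsigned char b1 = 0x80 | ((cp >> 6)  & 0x3F);  // 0x82
    unsigned char b2 = 0x80 |  (cp        & 0x3F);  // 0xAC
    assert(b0 == 0xE2 && b1 == 0x82 && b2 == 0xAC);
    return 0;
}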
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
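[Editor's note] Reader::parse above is the whole public entry point: it tokenises the document, builds the Value tree rooted at root, optionally attaches comments, and, under Features::strictMode (comments disabled, root must be an array or object), rejects anything else. A minimal caller using only the API visible in this file; it assumes the usual <json/json.h> umbrella header and linking against the 0.5.0 library:

#include <cstdio>
#include <string>
#include <json/json.h>

int main() {
    Json::Features features = Json::Features::strictMode();
    Json::Reader reader(features);
    Json::Value root;
    std::string doc = "{ \"name\": \"jsoncpp\", \"version\": 0.5 }";
    if (!reader.parse(doc, root, /*collectComments=*/false)) {
        // getFormatedErrorMessages() yields one "Line X, Column Y" block per recorded error.
        std::printf("%s", reader.getFormatedErrorMessages().c_str());
        return 1;
    }
    return 0;
}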
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
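[Editor's note] decodeNumber above avoids overflow without wider arithmetic: before folding in another digit it compares the accumulator against maxUInt / 10 (or -minInt / 10 for negative literals) and bails out to the double path the moment the next digit might not fit. The same guard in isolation, with illustrative 32-bit limits and a hypothetical accumulate() helper:

#include <cassert>

// Accumulate decimal digits into a 32-bit unsigned value; report false as soon
// as another digit could overflow, mirroring the threshold test above.
static bool accumulate(const char *digits, unsigned &value) {
    const unsigned maxUInt   = 0xFFFFFFFFu;
    const unsigned threshold = maxUInt / 10;     // 429496729
    value = 0;
    for (; *digits; ++digits) {
        if (value >= threshold)
            return false;                        // caller falls back to double parsing
        value = value * 10 + unsigned(*digits - '0');
    }
    return true;
}

int main() {
    unsigned v = 0;
    assert(accumulate("4294967", v) && v == 4294967u);
    assert(!accumulate("42949672950", v));       // would overflow 32 bits
    return 0;
}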
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
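[Editor's note] decodeUnicodeCodePoint / decodeUnicodeEscapeSequence above handle JSON's \uXXXX escapes: four hex digits give a 16-bit unit, and a high surrogate (D800..DBFF) must be followed by a second \u escape so the pair can be recombined as 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF). A worked check for "\uD834\uDD1E" (U+1D11E, the musical G clef):

#include <cassert>

int main() {
    unsigned hi = 0xD834;                       // high (lead) surrogate
    unsigned lo = 0xDD1E;                       // low (trail) surrogate
    assert(hi >= 0xD800 && hi <= 0xDBFF);       // the reader only pairs in this range
    unsigned cp = 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF);
    assert(cp == 0x1D11E);                      // then encoded as a 4-byte UTF-8 sequence
    return 0;
}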
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
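[Editor's note] The minInt/maxInt/maxUInt constants near the top of json_value.cpp are derived purely from unsigned arithmetic: UInt(-1) is the all-ones pattern (maxUInt), halving it clears the sign bit (maxInt), and complementing that gives the most negative two's-complement value (minInt). For the 32-bit int/unsigned int these typedefs map to in 0.5.0, that works out as follows:

#include <cassert>

int main() {
    typedef unsigned int UInt;               // Json::UInt in 0.5.0
    typedef int Int;                         // Json::Int in 0.5.0

    UInt maxUInt = UInt(-1);                 // 0xFFFFFFFF
    Int  maxInt  = Int(UInt(-1) / 2);        // 0x7FFFFFFF =  2147483647
    Int  minInt  = Int(~(UInt(-1) / 2));     // 0x80000000 = -2147483648

    assert(maxUInt == 0xFFFFFFFFu);
    assert(maxInt  == 2147483647);
    assert(minInt  == -maxInt - 1);          // two's-complement relationship
    return 0;
}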
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
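// The path grammar handled by makePath() above: '.' separates object member names,
// "[n]" selects an array index, and '%' substitutes the next PathArgument passed to the
// Path constructor.  A hedged sketch of typical use -- "layout"/"windows"/"width" and the
// 640 default are illustrative values, not taken from this code base:
Json::Value root;                                    // assume this was parsed from JSON elsewhere
Json::Path widthPath( ".layout.windows[0].width" );
Json::Value width = widthPath.resolve( root, 640 );  // falls back to 640 when the path is missing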
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
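// StyledWriter above accumulates the document in a std::string, while StyledStreamWriter
// (whose constructor and write() entry point appear just above) streams the same layout to a
// std::ostream.  A hedged usage sketch -- the value contents and "pretty.json" are
// illustrative, and <fstream> is assumed to be included:
Json::Value root;
root["name"] = "example";
root["flags"].append( 1 );
std::string text = Json::StyledWriter().write( root );   // whole document returned as a string
std::ofstream out( "pretty.json" );
Json::StyledStreamWriter( "  " ).write( out, root );     // same document, streamed with 2-space indent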
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
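// json_writer.cpp above ends by defining operator<< for std::ostream, the counterpart of the
// operator>> defined in json_reader.cpp.  A hedged round-trip sketch -- bumpVersion(),
// "config.json" and the "version" member are hypothetical, not part of the library or its tests:
#include <fstream>
#include <json/json.h>

void bumpVersion()
{
   Json::Value root;
   std::ifstream in( "config.json" );
   in >> root;                                            // parses; throws std::runtime_error on bad input
   root["version"] = root.get( "version", 0 ).asInt() + 1;
   std::ofstream out( "config.json" );
   out << root;                                           // pretty-prints via StyledStreamWriter
}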
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
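         // (Annotation, not in the original file.) Storing this pointer lets
         // popPredicateContext() retarget messageTarget_ at the predicate's
         // failure entry, so text streamed with operator<<() after the
         // predicate returns is attached to the right report line.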
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
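      /// (Annotation, not in the original header.) Enclosing PredicateContext
      /// entries pushed by JSONTEST_ASSERT_PRED are recorded first, so the
      /// failure appears nested under its assertion callstack; the returned
      /// reference lets additional diagnostic text be streamed with operator<<.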
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. 
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
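Aside (an illustrative sketch, not part of the original main.cpp): with the macros from jsontest.h, adding a check to this suite is one new fixture body plus one registration line in main(); the hypothetical test below reuses the ValueTest fixture defined further down.

JSONTEST_FIXTURE( ValueTest, integerIsNumericExample )   // hypothetical test name
{
   // Non-aborting assertions: both checks run even if the first one fails.
   JSONTEST_ASSERT_EQUAL( true, integer_.isNumeric() );
   JSONTEST_ASSERT_EQUAL( false, integer_.isString() );
}

// Registered in main() with:
//    JSONTEST_REGISTER_FIXTURE( runner, ValueTest, integerIsNumericExample );
// and runnable on its own via --test ValueTest/integerIsNumericExample.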
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
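# Annotation (not part of the original sconscript): the test_lib_json binary
# built here accepts the options implemented by Runner::runCommandLine above,
# for example:
#    test_lib_json --list-tests
#    test_lib_json --test ValueTest/size --test ValueTest/isObject
#    test_lib_json --test-auto
# (the binary's output path depends on the SCons/platform configuration).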
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 5bf42daff8a98088021bc8eb4e0f1e73dfe087f2 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 11 Mar 2010 21:09:07 +0000 Subject: [PATCH 136/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@136 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/LICENSE | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 116 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 368 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + 
.../0.5.0/test/data/test_array_06.expected | 5 + .../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + 
.../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 173 files changed, 14541 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/LICENSE create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 
100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/LICENSE b/tags/jsoncpp/0.5.0/LICENSE new file mode 100644 index 0000000..d20fb29 --- /dev/null +++ b/tags/jsoncpp/0.5.0/LICENSE @@ -0,0 +1 @@ +The json-cpp library and this documentation are in Public Domain. 
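The rununittests.py script shown above (removed here and re-added under tags/jsoncpp/0.5.0 by this release commit) drives the test_lib_json executable through a small command-line protocol: --list-tests prints one test name per line, and --test NAME runs a single named test, optionally under valgrind. The sketch below shows that per-test invocation in isolation; it follows the same subprocess pattern as TestProxy.run, and the executable path and test name in the usage comment are purely illustrative, not names taken from the build.

import subprocess

def run_single_test( test_exe_path, test_name ):
    # One process per test, selected with --test, as TestProxy.run does;
    # stderr is folded into stdout so all failure output stays in one stream.
    process = subprocess.Popen( [test_exe_path, '--test', test_name],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT )
    output = process.communicate()[0]
    return process.returncode == 0, output

# Usage (hypothetical path and test name):
# ok, output = run_single_test( 'bin/test_lib_json', 'ValueTest/checkInteger' )

Running each test in its own process means a crash or leak in one test cannot abort the whole run, which is presumably why the proxy rebuilds the command line for every test name instead of batching them.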
diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. 
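The TESTNAME.expected layout described above is the same flattened tree that the Python runner shown earlier in this series (pyjsontestrunner.py) emits: one line per element, a '.'-rooted path on the left of '=', and the scalar value (or the [] / {} placeholders) on the right. Below is a minimal sketch of that flattening, meant as an illustration of the format rather than the exact jsontest output; it assumes the standard json module and, like the original runner, sorts object members by name.

import json

def flatten( value, path = '.', out = None ):
    # Emit one "path=value" line per element of the parsed tree.
    out = out if out is not None else []
    if isinstance( value, dict ):
        out.append( '%s={}' % path )
        suffix = '' if path.endswith( '.' ) else '.'
        for name in sorted( value ):
            flatten( value[name], path + suffix + name, out )
    elif isinstance( value, list ):
        out.append( '%s=[]' % path )
        for index, child in enumerate( value ):
            flatten( child, '%s[%d]' % (path, index), out )
    elif isinstance( value, bool ):
        out.append( '%s=%s' % (path, value and 'true' or 'false') )
    elif value is None:
        out.append( '%s=null' % path )
    elif isinstance( value, str ):
        out.append( '%s="%s"' % (path, value) )
    else:  # int or float
        out.append( '%s=%s' % (path, value) )
    return out

# For example, flatten( json.loads( '{"count": 1, "items": ["a"]}' ) ) gives:
#   .={}
#   .count=1
#   .items=[]
#   .items[0]="a"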
+ + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) 
+if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
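
The DISTRIBUTE_GROUP_DOC and SUBGROUPING options above govern explicitly declared member groups. A rough sketch of the grouping markup they apply to, assuming a class along these lines (names are illustrative):

   class Value
   {
   public:
      /// \name Type queries
      //@{
      bool isNull() const;
      bool isArray() const;
      bool isObject() const;
      //@}
   };

With SUBGROUPING = YES the three queries are listed as a named "Type queries" subgroup under the Public Member Functions section.
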
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
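
Because INTERNAL_DOCS is set to YES above, text placed after a \internal command is kept in the generated pages instead of being dropped. A sketch of such a block, on an illustrative member:

   /** Swaps the payload of two values.
    * \internal Used by the assignment operator; not part of the
    * public contract and may change between releases.
    */
   void swap( Value &other );
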
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
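
The macros listed under PREDEFINED just below are injected into Doxygen's preprocessor, so declarations guarded by them are documented as if those symbols were defined at compile time. For example, with JSON_VALUE_USE_INTERNAL_MAP predefined, a block of the following shape would be parsed and documented (the member shown is illustrative):

   #ifdef JSON_VALUE_USE_INTERNAL_MAP
      /// Storage used when the internal map implementation is enabled.
      ValueInternalMap *map_;
   #endif
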
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
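
With CALL_GRAPH left at NO, call graphs are only produced for functions that opt in with the \callgraph command mentioned above. A sketch of that opt-in, on an illustrative declaration:

   /** Parses the document and fills in the root value.
    * \callgraph
    */
   bool parse( const std::string &document, Value &root );
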
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
(footer.html body: the HTML markup was not preserved here; the visible text is the
SourceForge logo, "hosts this site.", and "Send comments to: Json-cpp Developers")
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@
(header.html body: the HTML markup was not preserved here; the page title is
"JsonCpp - JSON data format manipulation library" and the visible links are
"JsonCpp project page" and "JsonCpp home page")
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox new file mode 100644 index 0000000..abaac6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox @@ -0,0 +1,116 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space" = true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- attach C and C++ style comments to element during parsing +- rewrite JSON document preserving original comments + +Notes: Comments used to be supported in JSON but where removed for +portability (C like comments are not supported in Python). Since +comments are useful in configuration/input file, this feature was +preserved. + +\section _example Code example + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormatedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _plinks Build instructions +The build instructions are located in the file +README.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To checkout the source, see the following +instructions. 
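
Building on the code example above, the stream operators make a load, edit, and save round trip very short. A minimal sketch, assuming the configuration lives in a file named config.json (the helper function name is made up for the example):

   #include <json/json.h>
   #include <fstream>

   void updateIndentLength()
   {
      Json::Value root;
      std::ifstream in( "config.json" );
      in >> root;                       // parse the whole document

      root["indent"]["length"] = 4;     // change one setting

      std::ofstream out( "config.json" );
      out << root;                      // write it back in the styled, human readable format
   }
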
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/readme.txt @@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox new file mode 100644 index 0000000..7f3aa1a --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox @@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+ \section ms_release Make JsonCpp ready for release
+ - Build system clean-up:
+   - Fix build on Windows (shared-library build is broken)
+   - Add enable/disable flag for static and shared library build
+   - Enhance help
+ - Platform portability check: (Note: was OK on last check)
+   - linux/gcc,
+   - solaris/cc,
+   - windows/msvc678,
+   - aix/vacpp
+ - Add JsonCpp version to header as a numeric value for use in preprocessor tests
+ - Remove buggy experimental hash stuff
+ - Release on SourceForge download page
+ \section ms_strict Add a strict mode to the reader/parser
+ Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+ - Enforce only object or array as root element
+ - Disable comment support
+ - Get failing jsonchecker tests to pass in strict mode
+ \section ms_separation Expose JSON reader/writer APIs that do not impose using Json::Value.
+ Some typical use-cases involve converting an application-specific structure to/from a JSON document.
+ - Event-based parser to allow unserializing a JSON document directly into a data structure instead of
+   using the intermediate Json::Value.
+ - "Stream"-based writer to serialize a JSON document without using Json::Value as input.
+ - Performance-oriented parser/writer:
+   - Provide an event-based parser. Should allow pulling & skipping events for ease of use.
+   - Provide a JSON document builder: fast only.
+ \section ms_perfo Performance tuning
+ - Provide support for static property name definition, avoiding allocation
+ - A static property dictionary can be provided to the JSON reader
+ - Performance scenarios & benchmarking
+*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py new file mode 100644 index 0000000..0a2a6c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doxybuild.py @@ -0,0 +1,167 @@
+"""Script to generate doxygen documentation.
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..58bfd88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
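+ *
+ * A brief illustrative sketch (editorial addition; the member names and the
+ * default value below are hypothetical) of the access rules described above:
+ * \code
+ * Json::Value obj( Json::objectValue );
+ * obj["name"] = "json";                         // member is created on demand
+ * Json::Value port = obj.get( "port", 8080 );   // default returned if absent
+ *
+ * Json::Value arr( Json::arrayValue );
+ * arr[2u] = 3;   // indices 0 and 1 are implicitly filled with null
+ * \endcode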
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Experimental do not use: Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
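+ // Editorial note (not part of the original header): with itemsPerPage == 8,
+ // the look-up rule documented above places item 21 at pages_[21 / 8][21 % 8],
+ // i.e. pages_[2][5]; because 8 is a power of two, the divide and modulo
+ // reduce to a shift and a mask.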
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
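+ *
+ * A minimal usage sketch (editorial addition; the two-space indentation and the
+ * sample member are illustrative, not prescribed):
+ * \code
+ * Json::Value root;
+ * root["encoding"] = "UTF-8";
+ * Json::StyledStreamWriter writer( "  " );
+ * writer.write( std::cout, root );   // requires <iostream> for std::cout
+ * \endcode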
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32
+	EndGlobalSection
+	GlobalSection(ExtensibilityGlobals) = postSolution
+	EndGlobalSection
+	GlobalSection(ExtensibilityAddIns) = postSolution
+	EndGlobalSection
+EndGlobal
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj
new file mode 100644
index 0000000..99a4dd6
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj
@@ -0,0 +1,119 @@
[119 added lines of Visual Studio 7.1 project XML; the markup was not preserved in this extraction]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj
new file mode 100644
index 0000000..2d7bf99
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj
@@ -0,0 +1,214 @@
[214 added lines of Visual Studio 7.1 project XML; the markup was not preserved in this extraction]
diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj
new file mode 100644
index 0000000..df36700
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj
@@ -0,0 +1,130 @@
[130 added lines of Visual Studio 7.1 project XML; the markup was not preserved in this extraction]
diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py
new file mode 100644
index 0000000..b760fae
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/makerelease.py
@@ -0,0 +1,368 @@
+"""Tag the sandbox for release, make source and doc tarballs.
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
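+    # (Illustrative restatement, not part of the original script: the paths below
+    # are only examples from this repository. The two glob passes that follow end
+    # up issuing calls such as
+    ##   fixeol.fix_source_eol( 'makefiles/vs71/jsoncpp.sln', is_dry_run = False, verbose = True, eol = '\r\n' )
+    ##   fixeol.fix_source_eol( 'src/lib_json/json_value.cpp', is_dry_run = False, verbose = True, eol = '\n' )
+    # so that Windows project files ship with CRLF and everything else with LF.)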
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' 
+ svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
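+# (Illustrative usage, not part of the original tool: a SConscript would register
+# and invoke the builder defined above roughly as
+##   env.Tool( 'targz', toolpath = ['scons-tools'] )
+##   env.TarGz( 'dist/jsoncpp-src.tar.gz', [ env.Dir('src'), env.Dir('include') ],
+##              TARGZ_BASEDIR = env.Dir('.') )
+# with TARGZ_COMPRESSION_LEVEL and TARGZ_BASEDIR overridable per call.)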
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
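+// Illustrative sketch (not part of the original source) of the allocation pattern
+// that BatchAllocator requires and that the default allocators above follow; the
+// template arguments shown here are an assumption:
+//
+//    Json::BatchAllocator<Json::Value, 1> allocator;
+//    Json::Value *value = allocator.allocate();   // raw, unconstructed storage
+//    new (value) Json::Value( "example" );        // construct with placement new
+//    value->~Value();                             // destroy explicitly...
+//    allocator.release( value );                  // ...then hand the storage back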
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
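+ *
+ * Illustrative sketch (not part of the original comment) of the pattern relied on
+ * by DefaultValueMapAllocator::allocateMapLink() below:
+ * \code
+ *   ValueInternalLink *link = linksAllocator_.allocate();
+ *   memset( link, 0, sizeof(ValueInternalLink) );   // stands in for default construction
+ * \endcode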
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
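+// Rough usage sketch for the class implemented below:
+//
+//    Json::Reader reader;
+//    Json::Value root;
+//    if ( !reader.parse( document, root, /*collectComments=*/true ) )
+//       std::cerr << reader.getFormatedErrorMessages();
+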
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
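+      // note: readArray() also peeks at ']' directly to accept empty arrays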
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
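+// (When JSON_VALUE_USE_INTERNAL_MAP is defined, objects and arrays are backed by
+//  the ValueInternalMap / ValueInternalArray implementations included below;
+//  otherwise they are stored in ObjectValues, a std::map keyed by CZString.)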
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
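+   // Only null and array values may be resized (null is first promoted to an
+   // empty array). With the default std::map backend, growing just creates the
+   // element at newSize - 1 on demand and shrinking erases every index >= newSize;
+   // the internal-array backend delegates to ValueInternalArray::resize().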
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line,
+ const char *expr = 0 );
+
+ /// Removes the last PredicateContext added to the predicate stack
+ /// chained list.
+ /// Next messages will be targeted at the PredicateContext that was removed.
+ TestResult &popPredicateContext();
+
+ bool failed() const;
+
+ void printFailure( bool printTestName ) const;
+
+ TestResult &operator << ( bool value );
+ TestResult &operator << ( int value );
+ TestResult &operator << ( unsigned int value );
+ TestResult &operator << ( double value );
+ TestResult &operator << ( const char *value );
+ TestResult &operator << ( const std::string &value );
+
+ private:
+ TestResult &addToLastFailure( const std::string &message );
+ unsigned int getAssertionNestingLevel() const;
+ /// Adds a failure or a predicate context
+ void addFailureInfo( const char *file, unsigned int line,
+ const char *expr, unsigned int nestingLevel );
+ static std::string indentText( const std::string &text,
+ const std::string &indent );
+
+ typedef std::deque<Failure> Failures;
+ Failures failures_;
+ std::string name_;
+ PredicateContext rootPredicateNode_;
+ PredicateContext::Id lastUsedPredicateId_;
+ /// Failure which is the target of the messages added using operator <<
+ Failure *messageTarget_;
+ };
+
+
+ class TestCase
+ {
+ public:
+ TestCase();
+
+ virtual ~TestCase();
+
+ void run( TestResult &result );
+
+ virtual const char *testName() const = 0;
+
+ protected:
+ TestResult *result_;
+
+ private:
+ virtual void runTestCase() = 0;
+ };
+
+ /// Function pointer type for TestCase factory
+ typedef TestCase *(*TestCaseFactory)();
+
+ class Runner
+ {
+ public:
+ Runner();
+
+ /// Adds a test to the suite
+ Runner &add( TestCaseFactory factory );
+
+ /// Runs tests as specified on the command-line
+ /// If no command-line arguments are provided, run all tests.
+ /// If --list-tests is provided, then print the list of all test cases
+ /// If --test is provided, then run test testname.
+ int runCommandLine( int argc, const char *argv[] ) const;
+
+ /// Runs all the test cases
+ bool runAllTest( bool printSummary ) const;
+
+ /// Returns the number of test cases in the suite
+ unsigned int testCount() const;
+
+ /// Returns the name of the test case at the specified index
+ std::string testNameAt( unsigned int index ) const;
+
+ /// Runs the test case at the specified index using the specified TestResult
+ void runTestAt( unsigned int index, TestResult &result ) const;
+
+ static void printUsage( const char *appName );
+
+ private: // prevents copy construction and assignment
+ Runner( const Runner &other );
+ Runner &operator =( const Runner &other );
+
+ private:
+ void listTests() const;
+ bool testIndex( const std::string &testName, unsigned int &index ) const;
+ static void preventDialogOnCrash();
+
+ private:
+ typedef std::deque<TestCaseFactory> Factories;
+ Factories tests_;
+ };
+
+ template <typename T>
+ TestResult &
+ checkEqual( TestResult &result, const T &expected, const T &actual,
+ const char *file, unsigned int line, const char *expr )
+ {
+ if ( expected != actual )
+ {
+ result.addFailure( file, line, expr );
+ result << "Expected: " << expected << "\n";
+ result << "Actual : " << actual;
+ }
+ return result;
+ }
+
+ TestResult &
+ checkStringEqual( TestResult &result,
+ const std::string &expected, const std::string &actual,
+ const char *file, unsigned int line, const char *expr );
+
+} // namespace JsonTest
+
+
+/// \brief Asserts that the given expression is true.
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
+/// JSONTEST_ASSERT( x == y );
+#define JSONTEST_ASSERT( expr ) \
+ if ( expr ) \
+ { \
+ } \
+ else \
+ result_->addFailure( __FILE__, __LINE__, #expr )
+
+/// \brief Asserts that the given predicate is true.
+/// The predicate may do other assertions and be a member function of the fixture.
+#define JSONTEST_ASSERT_PRED( expr ) \
+ { \
+ JsonTest::PredicateContext _minitest_Context = { \
+ result_->predicateId_, __FILE__, __LINE__, #expr }; \
+ result_->predicateStackTail_->next_ = &_minitest_Context; \
+ result_->predicateId_ += 1; \
+ result_->predicateStackTail_ = &_minitest_Context; \
+ (expr); \
+ result_->popPredicateContext(); \
+ } \
+ *result_
+
+/// \brief Asserts that two values are equal.
+#define JSONTEST_ASSERT_EQUAL( expected, actual ) \
+ JsonTest::checkEqual( *result_, expected, actual, \
+ __FILE__, __LINE__, \
+ #expected " == " #actual )
+
+/// \brief Asserts that two values are equal.
+#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
+ JsonTest::checkStringEqual( *result_, \
+ std::string(expected), std::string(actual), \
+ #expected " == " #actual )
+
+/// \brief Begin a fixture test case.
+#define JSONTEST_FIXTURE( FixtureType, name ) \
+ class Test##FixtureType##name : public FixtureType \
+ { \
+ public: \
+ static JsonTest::TestCase *factory() \
+ { \
+ return new Test##FixtureType##name(); \
+ } \
+ public: /* overridden from TestCase */ \
+ virtual const char *testName() const \
+ { \
+ return #FixtureType "/" #name; \
+ } \
+ virtual void runTestCase(); \
+ }; \
+ \
+ void Test##FixtureType##name::runTestCase()
+
+#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
+ &Test##FixtureType##name::factory
+
+#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
+ (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
+
+#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
new file mode 100644
index 0000000..b80776d
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp
@@ -0,0 +1,244 @@
+#include <json/json.h>
+#include "jsontest.h"
+
+
+// TODO:
+// - boolean values return that they are integral. Should not be.
+// - unsigned integers in integer range are not considered to be valid integers. Should check range.
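For readers skimming this import, the fixture macros in jsontest.h above are easier to follow once expanded. The sketch below is a rough hand-expansion, an illustration only and not code contained in this patch: it shows approximately what JSONTEST_FIXTURE( ValueTest, size ) and JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ), as used in main.cpp below, produce after preprocessing.

class TestValueTestsize : public ValueTest
{
public:
   // Factory handed to Runner::add(); the Runner creates a fresh fixture per run.
   static JsonTest::TestCase *factory()
   {
      return new TestValueTestsize();
   }
public: // overridden from TestCase
   virtual const char *testName() const
   {
      // Adjacent string literals concatenate to "ValueTest/size".
      return "ValueTest" "/" "size";
   }
   // The block written after the JSONTEST_FIXTURE invocation supplies this body.
   virtual void runTestCase();
};

// JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ) then amounts to:
(runner).add( &TestValueTestsize::factory );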
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From aca5fbfe584be764bb9c73c34e03fb17c8a8900e Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 12 Mar 2010 07:29:28 +0000 Subject: [PATCH 137/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@137 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 - tags/jsoncpp/0.5.0/LICENSE | 1 - tags/jsoncpp/0.5.0/README.txt | 117 - tags/jsoncpp/0.5.0/SConstruct | 235 -- tags/jsoncpp/0.5.0/devtools/__init__.py | 1 - tags/jsoncpp/0.5.0/devtools/antglob.py | 201 -- tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 - tags/jsoncpp/0.5.0/devtools/tarball.py | 53 - tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.5.0/doc/footer.html | 23 - tags/jsoncpp/0.5.0/doc/header.html | 24 - tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 116 - tags/jsoncpp/0.5.0/doc/readme.txt | 1 - tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 - tags/jsoncpp/0.5.0/doxybuild.py | 167 -- tags/jsoncpp/0.5.0/include/json/autolink.h | 19 - tags/jsoncpp/0.5.0/include/json/config.h | 43 - tags/jsoncpp/0.5.0/include/json/features.h | 42 - tags/jsoncpp/0.5.0/include/json/forwards.h | 39 - tags/jsoncpp/0.5.0/include/json/json.h | 10 - tags/jsoncpp/0.5.0/include/json/reader.h | 196 -- tags/jsoncpp/0.5.0/include/json/value.h | 1069 --------- tags/jsoncpp/0.5.0/include/json/writer.h | 174 -- tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 - .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 - .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 -- .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.5.0/makerelease.py | 368 --- tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 - tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 -- tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 - .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 -- .../0.5.0/src/jsontestrunner/sconscript | 9 - .../0.5.0/src/lib_json/json_batchallocator.h | 125 - .../0.5.0/src/lib_json/json_internalarray.inl | 448 ---- .../0.5.0/src/lib_json/json_internalmap.inl | 607 ----- .../0.5.0/src/lib_json/json_reader.cpp | 885 ------- .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 ------------- .../0.5.0/src/lib_json/json_valueiterator.inl | 292 --- .../0.5.0/src/lib_json/json_writer.cpp | 829 ------- tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 - .../0.5.0/src/test_lib_json/jsontest.cpp | 603 ----- .../0.5.0/src/test_lib_json/jsontest.h | 254 -- tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 -- .../0.5.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.5.0/test/cleantests.py | 10 - .../0.5.0/test/data/test_array_01.expected | 1 - .../0.5.0/test/data/test_array_01.json | 1 - .../0.5.0/test/data/test_array_02.expected | 2 - .../0.5.0/test/data/test_array_02.json | 1 - .../0.5.0/test/data/test_array_03.expected | 6 - .../0.5.0/test/data/test_array_03.json | 1 - .../0.5.0/test/data/test_array_04.expected | 5 - .../0.5.0/test/data/test_array_04.json | 1 - .../0.5.0/test/data/test_array_05.expected | 100 - .../0.5.0/test/data/test_array_05.json | 1 - 
.../0.5.0/test/data/test_array_06.expected | 5 - .../0.5.0/test/data/test_array_06.json | 4 - .../0.5.0/test/data/test_basic_01.expected | 1 - .../0.5.0/test/data/test_basic_01.json | 1 - .../0.5.0/test/data/test_basic_02.expected | 1 - .../0.5.0/test/data/test_basic_02.json | 1 - .../0.5.0/test/data/test_basic_03.expected | 3 - .../0.5.0/test/data/test_basic_03.json | 3 - .../0.5.0/test/data/test_basic_04.expected | 2 - .../0.5.0/test/data/test_basic_04.json | 2 - .../0.5.0/test/data/test_basic_05.expected | 2 - .../0.5.0/test/data/test_basic_05.json | 2 - .../0.5.0/test/data/test_basic_06.expected | 2 - .../0.5.0/test/data/test_basic_06.json | 2 - .../0.5.0/test/data/test_basic_07.expected | 2 - .../0.5.0/test/data/test_basic_07.json | 2 - .../0.5.0/test/data/test_basic_08.expected | 2 - .../0.5.0/test/data/test_basic_08.json | 3 - .../0.5.0/test/data/test_basic_09.expected | 2 - .../0.5.0/test/data/test_basic_09.json | 4 - .../0.5.0/test/data/test_comment_01.expected | 8 - .../0.5.0/test/data/test_comment_01.json | 8 - .../0.5.0/test/data/test_complex_01.expected | 20 - .../0.5.0/test/data/test_complex_01.json | 17 - .../0.5.0/test/data/test_integer_01.expected | 1 - .../0.5.0/test/data/test_integer_01.json | 2 - .../0.5.0/test/data/test_integer_02.expected | 1 - .../0.5.0/test/data/test_integer_02.json | 2 - .../0.5.0/test/data/test_integer_03.expected | 1 - .../0.5.0/test/data/test_integer_03.json | 2 - .../0.5.0/test/data/test_integer_04.expected | 2 - .../0.5.0/test/data/test_integer_04.json | 3 - .../0.5.0/test/data/test_integer_05.expected | 2 - .../0.5.0/test/data/test_integer_05.json | 2 - .../0.5.0/test/data/test_large_01.expected | 2122 ----------------- .../0.5.0/test/data/test_large_01.json | 2 - .../0.5.0/test/data/test_object_01.expected | 1 - .../0.5.0/test/data/test_object_01.json | 1 - .../0.5.0/test/data/test_object_02.expected | 2 - .../0.5.0/test/data/test_object_02.json | 1 - .../0.5.0/test/data/test_object_03.expected | 4 - .../0.5.0/test/data/test_object_03.json | 5 - .../0.5.0/test/data/test_object_04.expected | 2 - .../0.5.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.5.0/test/data/test_real_01.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 - .../0.5.0/test/data/test_real_02.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 - .../0.5.0/test/data/test_real_03.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 - .../0.5.0/test/data/test_real_04.expected | 2 - .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 - .../0.5.0/test/data/test_real_05.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 - .../0.5.0/test/data/test_real_06.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 - .../0.5.0/test/data/test_real_07.expected | 3 - .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 - .../0.5.0/test/data/test_string_01.expected | 1 - .../0.5.0/test/data/test_string_01.json | 1 - .../0.5.0/test/data/test_string_02.expected | 1 - .../0.5.0/test/data/test_string_02.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - 
.../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - tags/jsoncpp/0.5.0/test/generate_expected.py | 11 - .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 - .../0.5.0/test/jsonchecker/fail10.json | 1 - .../0.5.0/test/jsonchecker/fail11.json | 1 - .../0.5.0/test/jsonchecker/fail12.json | 1 - .../0.5.0/test/jsonchecker/fail13.json | 1 - .../0.5.0/test/jsonchecker/fail14.json | 1 - .../0.5.0/test/jsonchecker/fail15.json | 1 - .../0.5.0/test/jsonchecker/fail16.json | 1 - .../0.5.0/test/jsonchecker/fail17.json | 1 - .../0.5.0/test/jsonchecker/fail18.json | 1 - .../0.5.0/test/jsonchecker/fail19.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 - .../0.5.0/test/jsonchecker/fail20.json | 1 - .../0.5.0/test/jsonchecker/fail21.json | 1 - .../0.5.0/test/jsonchecker/fail22.json | 1 - .../0.5.0/test/jsonchecker/fail23.json | 1 - .../0.5.0/test/jsonchecker/fail24.json | 1 - .../0.5.0/test/jsonchecker/fail25.json | 1 - .../0.5.0/test/jsonchecker/fail26.json | 1 - .../0.5.0/test/jsonchecker/fail27.json | 2 - .../0.5.0/test/jsonchecker/fail28.json | 2 - .../0.5.0/test/jsonchecker/fail29.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 - .../0.5.0/test/jsonchecker/fail30.json | 1 - .../0.5.0/test/jsonchecker/fail31.json | 1 - .../0.5.0/test/jsonchecker/fail32.json | 1 - .../0.5.0/test/jsonchecker/fail33.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 - .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 - .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 - .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 - tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.5.0/test/runjsontests.py | 134 -- tags/jsoncpp/0.5.0/test/rununittests.py | 73 - tags/jsoncpp/0.5.0/version | 1 - 173 files changed, 14541 deletions(-) delete mode 100644 tags/jsoncpp/0.5.0/AUTHORS delete mode 100644 tags/jsoncpp/0.5.0/LICENSE delete mode 100644 tags/jsoncpp/0.5.0/README.txt delete mode 100644 tags/jsoncpp/0.5.0/SConstruct delete mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.5.0/doc/footer.html delete mode 100644 tags/jsoncpp/0.5.0/doc/header.html delete mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.5.0/doxybuild.py delete mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/config.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/features.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/json.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/value.h delete mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj delete mode 
100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.5.0/makerelease.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json delete mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py delete mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.5.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/LICENSE b/tags/jsoncpp/0.5.0/LICENSE deleted file mode 100644 index d20fb29..0000000 --- a/tags/jsoncpp/0.5.0/LICENSE +++ /dev/null @@ -1 +0,0 @@ -The json-cpp library and this documentation are in Public Domain. 
diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt deleted file mode 100644 index ed7ef8f..0000000 --- a/tags/jsoncpp/0.5.0/README.txt +++ /dev/null @@ -1,117 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. 
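For instance, a document such as { "count" : 1234, "items" : [ "a", "b" ] } flattens to .={}, .count=1234, .items=[], .items[0]="a" and .items[1]="b", one element per line, matching the layout of the *.expected files shown earlier in this patch (this worked example is an editorial illustration, not a file from the test suite).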
- - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct deleted file mode 100644 index 0499db9..0000000 --- a/tags/jsoncpp/0.5.0/SConstruct +++ /dev/null @@ -1,235 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. - import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) 
-if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - vars = {} - for name in ('PATH', 'TEMP', 'TMP'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. 
-env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.5.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.5.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' 
% sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.5.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## 
excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.5.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.5.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] 
-# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. - -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. 
- -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. 
- -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. 
- -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. - -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. 
-# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. 
You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). - -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. 
Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. - -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. 
Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. - -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
- -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. - -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
-# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. 
If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. - -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. 
Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. 
- -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. - -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. 
- -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). - -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. 
If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. - -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. 
So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. 
- -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.5.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- SourceForge Logo hosts this site.
- Send comments to: Json-cpp Developers
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html deleted file mode 100644 index d56ea59..0000000 --- a/tags/jsoncpp/0.5.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@
- JsonCpp - JSON data format manipulation library
- JsonCpp project page
- JsonCpp home page
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox deleted file mode 100644 index abaac6c..0000000 --- a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox +++ /dev/null @@ -1,116 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space" = true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- attach C and C++ style comments to element during parsing -- rewrite JSON document preserving original comments - -Notes: Comments used to be supported in JSON but where removed for -portability (C like comments are not supported in Python). Since -comments are useful in configuration/input file, this feature was -preserved. - -\section _example Code example - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _plinks Build instructions -The build instructions are located in the file -README.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _pdownload Download -The sources can be downloaded from -SourceForge download page. - -The latest version of the source is available in the project's subversion repository: - -http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ - -To checkout the source, see the following -instructions. 
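The feature list above notes that comments attached during parsing survive a rewrite. A minimal sketch of that round trip, using only Reader::parse (comment collection is on by default), Value::setComment and StyledWriter as declared elsewhere in this patch; the document string and the comment text are illustrative:

\code
Json::Value root;
Json::Reader reader;
std::string doc = "{ \"encoding\" : \"UTF-8\" }";    // illustrative input
if ( reader.parse( doc, root ) )                     // collectComments defaults to true
{
   // Comments must be //... or /* ... */; this one is emitted before the member.
   root["encoding"].setComment( "// chosen at install time", Json::commentBefore );

   Json::StyledWriter writer;
   std::string rewritten = writer.write( root );     // the comment appears in the output
}
\endcode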
- -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -The json-cpp library and this documentation are in Public Domain. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.5.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox deleted file mode 100644 index 7f3aa1a..0000000 --- a/tags/jsoncpp/0.5.0/doc/roadmap.dox +++ /dev/null @@ -1,32 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - - Release on sourceforge download - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py deleted file mode 100644 index 0a2a6c7..0000000 --- a/tags/jsoncpp/0.5.0/doxybuild.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Script to generate doxygen documentation. 
-""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h deleted file mode 100644 index 37c9258..0000000 --- a/tags/jsoncpp/0.5.0/include/json/autolink.h +++ /dev/null @@ -1,19 +0,0 @@ -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h deleted file mode 100644 index 5d334cb..0000000 --- a/tags/jsoncpp/0.5.0/include/json/config.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
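The comment above describes JSON_USE_EXCEPTION (defined just below): an invalid type manipulation is reported by throwing rather than by a C assert. A minimal sketch of what that looks like to calling code; the exact exception class is not shown in this hunk, so catching std::exception is an assumption:

\code
Json::Value v( "not a number" );         // stringValue
try
{
   std::cout << v.asInt() << std::endl;  // invalid type manipulation
}
catch ( const std::exception &e )        // assumed base of the thrown type
{
   std::cerr << e.what() << std::endl;
}
\endcode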
-# define JSON_USE_EXCEPTION 1 - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h deleted file mode 100644 index f1404f6..0000000 --- a/tags/jsoncpp/0.5.0/include/json/features.h +++ /dev/null @@ -1,42 +0,0 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h deleted file mode 100644 index d0ce830..0000000 --- a/tags/jsoncpp/0.5.0/include/json/forwards.h +++ /dev/null @@ -1,39 +0,0 @@ -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -# include "config.h" - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef int Int; - typedef unsigned int UInt; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h deleted file mode 100644 index c71ed65..0000000 --- a/tags/jsoncpp/0.5.0/include/json/json.h +++ /dev/null @@ -1,10 +0,0 @@ -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h deleted file mode 100644 index ee1d6a2..0000000 --- a/tags/jsoncpp/0.5.0/include/json/reader.h +++ /dev/null @@ -1,196 +0,0 @@ -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -# include "features.h" 
-# include "value.h" -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormatedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h deleted file mode 100644 index 58bfd88..0000000 --- a/tags/jsoncpp/0.5.0/include/json/value.h +++ /dev/null @@ -1,1069 +0,0 @@ -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -# include "forwards.h" -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. 
- */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
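A minimal sketch of the access patterns described above: operator[] creating missing members, automatic array growth, get() with a default, append() and getMemberNames(); the member names and values are illustrative:

\code
Json::Value root( Json::objectValue );

root["name"] = "jsoncpp";                  // creates the member
root["tags"][0u] = "json";                 // member becomes an array; 0u disambiguates from the string overload
root["tags"].append( "parser" );           // same as root["tags"][ root["tags"].size() ] = "parser"

std::string enc = root.get( "encoding", "UTF-8" ).asString();   // default, "encoding" is absent

Json::Value::Members names = root.getMemberNames();              // "name", "tags"
\endcode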
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; - typedef UInt ArrayIndex; - - static const Value null; - static const Int minInt; - static const Int maxInt; - static const UInt maxUInt; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( int index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - int index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - int index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. 
- void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - UInt size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( UInt size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( UInt index ) const; - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( UInt index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( UInt index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. 
- * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... */ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 
1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - Int int_; - UInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( UInt index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - UInt index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. - Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - /** \brief Experimental do not use: Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. 
- * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
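Path and PathArgument are flagged above as experimental and untested, but the documented syntax suggests usage along these lines; the "files", "name" and "size" members below are hypothetical:

\code
Json::Value root;
root["files"][0u]["name"] = "readme.txt";

Json::Path namePath( ".files[0].name" );
std::string name = namePath.resolve( root ).asString();   // "readme.txt"

// '%' and '[%]' placeholders are filled in from the extra PathArgument parameters.
Json::Path sizePath( ".files[%].%",
                     Json::PathArgument( 0u ),
                     Json::PathArgument( "size" ) );
sizePath.make( root ) = 1024;   // make() creates the node if it does not exist yet
\endcode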
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Experimental: do not use. Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. 
- * \param indexes [input] pointer on the current index. May be \c NULL. - * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
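The iterator classes that follow use the usual STL increment/dereference pattern, with key(), index() and memberName() from the base class for introspection; a small sketch over a hypothetical object value ("host", "port"):

\code
#include <cstdio>

Json::Value config( Json::objectValue );
config["host"] = "localhost";
config["port"] = "8080";   // stored as strings so asCString() below is valid

for ( Json::Value::iterator it = config.begin(); it != config.end(); ++it )
{
   // memberName() is meaningful for objectValue; key() would also work and
   // returns the index for arrayValue or the member name for objectValue.
   printf( "%s = %s\n", it.memberName(), (*it).asCString() );
}
\endcode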
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h deleted file mode 100644 index 5f4b83b..0000000 --- a/tags/jsoncpp/0.5.0/include/json/writer.h +++ /dev/null @@ -1,174 +0,0 @@ -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -# include "value.h" -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. 
- * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
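A quick sketch of the three writers declared in this header; the "status" member and the two-space indentation string are illustrative:

\code
#include <iostream>
#include <string>

Json::Value root;
root["status"] = "ok";

Json::FastWriter fast;                 // compact single-line output, e.g. {"status":"ok"}
std::string compact = fast.write( root );

Json::StyledWriter styled;             // indented, human friendly, keeps attached comments
std::string pretty = styled.write( root );

Json::StyledStreamWriter streamWriter( "  " );   // indent by two spaces per level
streamWriter.write( std::cout, root );           // writes straight to the stream
std::cout << root;                               // operator<< below does the same thing
\endcode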
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py deleted file mode 100644 index b760fae..0000000 --- a/tags/jsoncpp/0.5.0/makerelease.py +++ /dev/null @@ -1,368 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. 
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned' and path != 'version': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_commit( message ): - """Commit the sandbox, providing the specified comment. - """ - svn_command( 'ci', '-m', message ) - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 2: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - next_version = args[1] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - svn_commit( 'Release ' + release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' 
- svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Source and doc release tarballs uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - - # Set next version number and commit - set_version( next_version ) - svn_commit( 'Released ' + release_version ) - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.5.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp deleted file mode 100644 index 231ee0c..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,233 +0,0 @@ -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 87ea5ed..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,125 +0,0 @@ -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
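      // (Editor's note, not in the original header.)  release() stores the previous
      // free-list head in the first bytes of the object being released, so the free
      // list lives entirely inside the unused storage and popping its head here is a
      // single pointer read.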
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 9b985d2..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,448 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - 
ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
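      // (Editor's note.)  The namespace-scope dummy object declared just below exists
      // only for this constructor's side effect: touching arrayAllocator() during
      // static initialisation builds the default allocator before any pre-main code
      // can ask for it.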
- } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - "ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - 
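      // (Editor's note.)  Values were constructed with placement new into raw pages
      // obtained from the array allocator, so each one must be destroyed explicitly
      // here before the pages themselves are released below.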
value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index 1977148..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,607 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
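 * (Editor's note: the all-zero state described here is exactly what
 * DefaultValueMapAllocator::allocateMapLink() produces with memset() before handing a
 * link out, which is why links from the batch allocator behave as if default-constructed.)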
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
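      // (Editor's note.)  Slots within a link fill from left to right, so the first
      // available slot both terminates the probe and becomes the insertion point for
      // a key that is not yet present.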
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = valueAllocator()->makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 4eb2d11..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,885 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. -#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - 
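// (Editor's note, not part of the original file.)  A worked example of the
// codePointToUTF8() mapping defined above, taking its three-byte branch for
// cp = 0x20AC (U+20AC, the euro sign):
//    result[0] = 0xE0 | (0x0F & (cp >> 12))  = 0xE2
//    result[1] = 0x80 | (0x3F & (cp >> 6))   = 0x82
//    result[2] = 0x80 | (0x3F & cp)          = 0xAC
// i.e. the UTF-8 byte sequence E2 82 AC, which is what decodeString() appends when it
// meets the escape sequence \u20AC.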
-Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. - std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - 
break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - 
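         // (Editor's note.)  A key token that is not a string drops out of this loop
         // and is reported through the generic "Missing '}' or object member name"
         // error at the end of readObject().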
break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; - Value::UInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); - } - if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int 
index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -std::string -Reader::getFormatedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp deleted file mode 100644 index 573205f..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1718 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo 
<= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); - -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. -// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} - -ValueAllocator::~ValueAllocator() -{ -} - -class DefaultValueAllocator : public ValueAllocator -{ -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } - - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; - -static ValueAllocator *&valueAllocator() -{ - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; -} - -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
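// (Editor's note.)  The json_internalarray.inl and json_internalmap.inl files removed
// earlier in this patch are only compiled into json_value.cpp when
// JSON_VALUE_USE_INTERNAL_MAP is defined; in the default build, array and object
// members are kept in the ObjectValues container keyed by CZString instead.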
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); - JSON_ASSERT( text ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = valueAllocator()->duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( int index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -int -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) 
- : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete 
value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 
1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -Value::UInt -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return Int( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( UInt newSize ) -{ - 
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( UInt index = newSize; index < oldSize; ++index ) - value_.map_->erase( index ); - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( UInt index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -const Value & -Value::operator[]( UInt index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( UInt index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( UInt index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( Value::UInt index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - Value::UInt index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 736e260..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,292 +0,0 @@ -// included by json_value.cpp -// everything is within Json namespace - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other 
) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp deleted file mode 100644 index cdf4188..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,829 +0,0 @@ -#include -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -std::string valueToString( Int value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( UInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( UInt value ) -{ - char buffer[32]; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asInt() ); - break; - case uintValue: - document_ += valueToString( value.asUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.5.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index a07d0fe..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,603 +0,0 @@ -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 8f0bd31..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,254 +0,0 @@ -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). - PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. 
- TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targeted at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque<Failure> Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. - int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template<typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true.
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. -#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp deleted file mode 100644 index b80776d..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,244 +0,0 @@ -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range.
- - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. - IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 
-.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected 
b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json 
b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 
-.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 
-.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 
-.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 
-.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 
-.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 
-.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 
-.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 
-.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 
-.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- 
a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ 
-"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json deleted file mode 100644 
index e7e1a9e..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.5.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. 
James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
- file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py deleted file mode 100644 index 800337d..0000000 --- a/tags/jsoncpp/0.5.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, 
detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' ) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.5.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - 
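The rununittests.py driver above relies on a small protocol implemented by the test executable: invoked with --test-auto --list-tests it prints one test name per line, and with --test-auto --test NAME it runs that single test and signals failure through its exit code. A minimal Python 3 sketch of the same protocol (the executable path is a placeholder; the script above targets Python 2):

    import subprocess
    import sys

    def run_named_tests(test_exe):
        # Ask the executable for its test names, one per line.
        listing = subprocess.run([test_exe, '--test-auto', '--list-tests'],
                                 capture_output=True, text=True)
        names = [n.strip() for n in listing.stdout.splitlines() if n.strip()]
        failed = []
        for name in names:
            # Run each test in its own process so a crash only aborts that test.
            result = subprocess.run([test_exe, '--test-auto', '--test', name],
                                    capture_output=True, text=True)
            if result.returncode:
                failed.append(name)
                print('FAILED:', name)
            else:
                print('OK:', name)
        return 1 if failed else 0

    if __name__ == '__main__':
        sys.exit(run_named_tests(sys.argv[1]))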
parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' ) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version deleted file mode 100644 index 79a2734..0000000 --- a/tags/jsoncpp/0.5.0/version +++ /dev/null @@ -1 +0,0 @@ -0.5.0 \ No newline at end of file From 2f4b7d39616ca9535a50df49f2395fe0153e03e0 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 12 Mar 2010 07:31:04 +0000 Subject: [PATCH 138/268] Release 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@138 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.5.0/AUTHORS | 1 + tags/jsoncpp/0.5.0/LICENSE | 1 + tags/jsoncpp/0.5.0/README.txt | 117 + tags/jsoncpp/0.5.0/SConstruct | 235 ++ tags/jsoncpp/0.5.0/devtools/__init__.py | 1 + tags/jsoncpp/0.5.0/devtools/antglob.py | 201 ++ tags/jsoncpp/0.5.0/devtools/fixeol.py | 63 + tags/jsoncpp/0.5.0/devtools/tarball.py | 53 + tags/jsoncpp/0.5.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.5.0/doc/footer.html | 23 + tags/jsoncpp/0.5.0/doc/header.html | 24 + tags/jsoncpp/0.5.0/doc/jsoncpp.dox | 116 + tags/jsoncpp/0.5.0/doc/readme.txt | 1 + tags/jsoncpp/0.5.0/doc/roadmap.dox | 32 + tags/jsoncpp/0.5.0/doxybuild.py | 167 ++ tags/jsoncpp/0.5.0/include/json/autolink.h | 19 + tags/jsoncpp/0.5.0/include/json/config.h | 43 + tags/jsoncpp/0.5.0/include/json/features.h | 42 + tags/jsoncpp/0.5.0/include/json/forwards.h | 39 + tags/jsoncpp/0.5.0/include/json/json.h | 10 + tags/jsoncpp/0.5.0/include/json/reader.h | 196 ++ tags/jsoncpp/0.5.0/include/json/value.h | 1069 +++++++++ tags/jsoncpp/0.5.0/include/json/writer.h | 174 ++ tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln | 46 + .../0.5.0/makefiles/vs71/jsontest.vcproj | 119 + .../0.5.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../0.5.0/makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.5.0/makerelease.py | 368 +++ tags/jsoncpp/0.5.0/scons-tools/globtool.py | 53 + tags/jsoncpp/0.5.0/scons-tools/srcdist.py | 179 ++ tags/jsoncpp/0.5.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.5.0/scons-tools/targz.py | 82 + .../jsoncpp/0.5.0/src/jsontestrunner/main.cpp | 233 ++ .../0.5.0/src/jsontestrunner/sconscript | 9 + .../0.5.0/src/lib_json/json_batchallocator.h | 125 + .../0.5.0/src/lib_json/json_internalarray.inl | 448 ++++ .../0.5.0/src/lib_json/json_internalmap.inl | 607 +++++ .../0.5.0/src/lib_json/json_reader.cpp | 885 +++++++ .../jsoncpp/0.5.0/src/lib_json/json_value.cpp | 1718 +++++++++++++ .../0.5.0/src/lib_json/json_valueiterator.inl | 292 +++ .../0.5.0/src/lib_json/json_writer.cpp | 829 +++++++ tags/jsoncpp/0.5.0/src/lib_json/sconscript | 8 + .../0.5.0/src/test_lib_json/jsontest.cpp | 603 +++++ .../0.5.0/src/test_lib_json/jsontest.h | 254 ++ tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp | 244 ++ .../0.5.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.5.0/test/cleantests.py | 10 + .../0.5.0/test/data/test_array_01.expected | 1 + .../0.5.0/test/data/test_array_01.json | 1 + .../0.5.0/test/data/test_array_02.expected | 2 + .../0.5.0/test/data/test_array_02.json | 1 + .../0.5.0/test/data/test_array_03.expected | 6 + .../0.5.0/test/data/test_array_03.json | 1 + .../0.5.0/test/data/test_array_04.expected | 5 + .../0.5.0/test/data/test_array_04.json | 1 + .../0.5.0/test/data/test_array_05.expected | 100 + .../0.5.0/test/data/test_array_05.json | 1 + 
.../0.5.0/test/data/test_array_06.expected | 5 + .../0.5.0/test/data/test_array_06.json | 4 + .../0.5.0/test/data/test_basic_01.expected | 1 + .../0.5.0/test/data/test_basic_01.json | 1 + .../0.5.0/test/data/test_basic_02.expected | 1 + .../0.5.0/test/data/test_basic_02.json | 1 + .../0.5.0/test/data/test_basic_03.expected | 3 + .../0.5.0/test/data/test_basic_03.json | 3 + .../0.5.0/test/data/test_basic_04.expected | 2 + .../0.5.0/test/data/test_basic_04.json | 2 + .../0.5.0/test/data/test_basic_05.expected | 2 + .../0.5.0/test/data/test_basic_05.json | 2 + .../0.5.0/test/data/test_basic_06.expected | 2 + .../0.5.0/test/data/test_basic_06.json | 2 + .../0.5.0/test/data/test_basic_07.expected | 2 + .../0.5.0/test/data/test_basic_07.json | 2 + .../0.5.0/test/data/test_basic_08.expected | 2 + .../0.5.0/test/data/test_basic_08.json | 3 + .../0.5.0/test/data/test_basic_09.expected | 2 + .../0.5.0/test/data/test_basic_09.json | 4 + .../0.5.0/test/data/test_comment_01.expected | 8 + .../0.5.0/test/data/test_comment_01.json | 8 + .../0.5.0/test/data/test_complex_01.expected | 20 + .../0.5.0/test/data/test_complex_01.json | 17 + .../0.5.0/test/data/test_integer_01.expected | 1 + .../0.5.0/test/data/test_integer_01.json | 2 + .../0.5.0/test/data/test_integer_02.expected | 1 + .../0.5.0/test/data/test_integer_02.json | 2 + .../0.5.0/test/data/test_integer_03.expected | 1 + .../0.5.0/test/data/test_integer_03.json | 2 + .../0.5.0/test/data/test_integer_04.expected | 2 + .../0.5.0/test/data/test_integer_04.json | 3 + .../0.5.0/test/data/test_integer_05.expected | 2 + .../0.5.0/test/data/test_integer_05.json | 2 + .../0.5.0/test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.5.0/test/data/test_large_01.json | 2 + .../0.5.0/test/data/test_object_01.expected | 1 + .../0.5.0/test/data/test_object_01.json | 1 + .../0.5.0/test/data/test_object_02.expected | 2 + .../0.5.0/test/data/test_object_02.json | 1 + .../0.5.0/test/data/test_object_03.expected | 4 + .../0.5.0/test/data/test_object_03.json | 5 + .../0.5.0/test/data/test_object_04.expected | 2 + .../0.5.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.5.0/test/data/test_real_01.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_01.json | 3 + .../0.5.0/test/data/test_real_02.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_02.json | 3 + .../0.5.0/test/data/test_real_03.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_03.json | 3 + .../0.5.0/test/data/test_real_04.expected | 2 + .../jsoncpp/0.5.0/test/data/test_real_04.json | 3 + .../0.5.0/test/data/test_real_05.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_05.json | 3 + .../0.5.0/test/data/test_real_06.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_06.json | 3 + .../0.5.0/test/data/test_real_07.expected | 3 + .../jsoncpp/0.5.0/test/data/test_real_07.json | 3 + .../0.5.0/test/data/test_string_01.expected | 1 + .../0.5.0/test/data/test_string_01.json | 1 + .../0.5.0/test/data/test_string_02.expected | 1 + .../0.5.0/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + 
.../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + tags/jsoncpp/0.5.0/test/generate_expected.py | 11 + .../jsoncpp/0.5.0/test/jsonchecker/fail1.json | 1 + .../0.5.0/test/jsonchecker/fail10.json | 1 + .../0.5.0/test/jsonchecker/fail11.json | 1 + .../0.5.0/test/jsonchecker/fail12.json | 1 + .../0.5.0/test/jsonchecker/fail13.json | 1 + .../0.5.0/test/jsonchecker/fail14.json | 1 + .../0.5.0/test/jsonchecker/fail15.json | 1 + .../0.5.0/test/jsonchecker/fail16.json | 1 + .../0.5.0/test/jsonchecker/fail17.json | 1 + .../0.5.0/test/jsonchecker/fail18.json | 1 + .../0.5.0/test/jsonchecker/fail19.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail2.json | 1 + .../0.5.0/test/jsonchecker/fail20.json | 1 + .../0.5.0/test/jsonchecker/fail21.json | 1 + .../0.5.0/test/jsonchecker/fail22.json | 1 + .../0.5.0/test/jsonchecker/fail23.json | 1 + .../0.5.0/test/jsonchecker/fail24.json | 1 + .../0.5.0/test/jsonchecker/fail25.json | 1 + .../0.5.0/test/jsonchecker/fail26.json | 1 + .../0.5.0/test/jsonchecker/fail27.json | 2 + .../0.5.0/test/jsonchecker/fail28.json | 2 + .../0.5.0/test/jsonchecker/fail29.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail3.json | 1 + .../0.5.0/test/jsonchecker/fail30.json | 1 + .../0.5.0/test/jsonchecker/fail31.json | 1 + .../0.5.0/test/jsonchecker/fail32.json | 1 + .../0.5.0/test/jsonchecker/fail33.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail4.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail5.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail6.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail7.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail8.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/fail9.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass1.json | 58 + .../jsoncpp/0.5.0/test/jsonchecker/pass2.json | 1 + .../jsoncpp/0.5.0/test/jsonchecker/pass3.json | 6 + .../jsoncpp/0.5.0/test/jsonchecker/readme.txt | 3 + tags/jsoncpp/0.5.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.5.0/test/runjsontests.py | 134 ++ tags/jsoncpp/0.5.0/test/rununittests.py | 73 + tags/jsoncpp/0.5.0/version | 1 + 173 files changed, 14541 insertions(+) create mode 100644 tags/jsoncpp/0.5.0/AUTHORS create mode 100644 tags/jsoncpp/0.5.0/LICENSE create mode 100644 tags/jsoncpp/0.5.0/README.txt create mode 100644 tags/jsoncpp/0.5.0/SConstruct create mode 100644 tags/jsoncpp/0.5.0/devtools/__init__.py create mode 100644 tags/jsoncpp/0.5.0/devtools/antglob.py create mode 100644 tags/jsoncpp/0.5.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.5.0/devtools/tarball.py create mode 100644 tags/jsoncpp/0.5.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.5.0/doc/footer.html create mode 100644 tags/jsoncpp/0.5.0/doc/header.html create mode 100644 tags/jsoncpp/0.5.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.5.0/doc/readme.txt create mode 100644 tags/jsoncpp/0.5.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.5.0/doxybuild.py create mode 100644 tags/jsoncpp/0.5.0/include/json/autolink.h create mode 100644 tags/jsoncpp/0.5.0/include/json/config.h create mode 100644 tags/jsoncpp/0.5.0/include/json/features.h create mode 100644 tags/jsoncpp/0.5.0/include/json/forwards.h create mode 100644 tags/jsoncpp/0.5.0/include/json/json.h create mode 100644 tags/jsoncpp/0.5.0/include/json/reader.h create mode 100644 tags/jsoncpp/0.5.0/include/json/value.h create mode 100644 tags/jsoncpp/0.5.0/include/json/writer.h create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj create mode 
100644 tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.5.0/makerelease.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.5.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.5.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.5.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.5.0/test/cleantests.py create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_basic_09.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json create mode 100644 
tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.5.0/test/generate_expected.py create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.5.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.5.0/test/runjsontests.py create mode 100644 tags/jsoncpp/0.5.0/test/rununittests.py create mode 100644 tags/jsoncpp/0.5.0/version diff --git a/tags/jsoncpp/0.5.0/AUTHORS b/tags/jsoncpp/0.5.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.5.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.5.0/LICENSE b/tags/jsoncpp/0.5.0/LICENSE new file mode 100644 index 0000000..d20fb29 --- /dev/null +++ b/tags/jsoncpp/0.5.0/LICENSE @@ -0,0 +1 @@ +The json-cpp library and this documentation are in Public Domain. 
diff --git a/tags/jsoncpp/0.5.0/README.txt b/tags/jsoncpp/0.5.0/README.txt new file mode 100644 index 0000000..ed7ef8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/README.txt @@ -0,0 +1,117 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. 
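As a purely hypothetical illustration of the element-path convention described in the README above (this is not one of the shipped test cases), an input file containing

    { "name": "json", "versions": [ "0.5.0" ] }

would be described by an .expected file along these lines:

    .={}
    .name="json"
    .versions=[]
    .versions[0]="0.5.0"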
+ + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. diff --git a/tags/jsoncpp/0.5.0/SConstruct b/tags/jsoncpp/0.5.0/SConstruct new file mode 100644 index 0000000..0499db9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/SConstruct @@ -0,0 +1,235 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) 
+if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + vars = {} + for name in ('PATH', 'TEMP', 'TMP'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( 
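The build helpers exported in the SConstruct above (buildLibrary, buildJSONExample, buildJSONTests, buildUnitTests) are consumed by the per-directory sconscript files pulled in through buildProjectInDirectory. Those sconscript files are added elsewhere in this patch; purely as a hypothetical sketch of the calling convention, a library sconscript could look like this (the source list is illustrative, not the real one):

    Import( 'env buildLibrary' )

    # Illustrative source list only; the real sconscript names the actual files.
    sources = [ 'json_reader.cpp', 'json_value.cpp', 'json_writer.cpp' ]
    buildLibrary( env, sources, 'json' )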
os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.5.0/devtools/__init__.py b/tags/jsoncpp/0.5.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/devtools/antglob.py b/tags/jsoncpp/0.5.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.5.0/devtools/fixeol.py b/tags/jsoncpp/0.5.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes 
= antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.5.0/devtools/tarball.py b/tags/jsoncpp/0.5.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.5.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.5.0/doc/doxyfile.in b/tags/jsoncpp/0.5.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
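A hypothetical use of the tarball.py helpers shown earlier in this hunk (file names and the destination directory are illustrative; note that make_tarball uses os.path.walk, so like the rest of these devtools it targets Python 2):

    import tarball

    # Pack selected sources into dist/jsoncpp-src-0.5.0.tar.gz; inside the
    # archive every entry is placed under the jsoncpp-src-0.5.0/ prefix.
    tarball.make_tarball( 'dist/jsoncpp-src-0.5.0.tar.gz',
                          [ 'src', 'include', 'README.txt' ],
                          base_dir = '.',
                          prefix_dir = 'jsoncpp-src-0.5.0' )

    # Unpack it again under a scratch directory.
    tarball.decompress( 'dist/jsoncpp-src-0.5.0.tar.gz', 'dist/unpacked' )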
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. 
You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. + +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. 
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. 
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.5.0/doc/footer.html b/tags/jsoncpp/0.5.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ SourceForge Logo hosts this site.
+ Send comments to: Json-cpp Developers
diff --git a/tags/jsoncpp/0.5.0/doc/header.html b/tags/jsoncpp/0.5.0/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/header.html @@ -0,0 +1,24 @@
+JsonCpp - JSON data format manipulation library
+ JsonCpp project page
+ JsonCpp home page
diff --git a/tags/jsoncpp/0.5.0/doc/jsoncpp.dox b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox new file mode 100644 index 0000000..abaac6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/doc/jsoncpp.dox @@ -0,0 +1,116 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- attach C and C++ style comments to elements during parsing
+- rewrite JSON documents preserving the original comments
+
+Note: comments used to be supported in JSON but were removed for
+portability (C-like comments are not supported in Python). Since
+comments are useful in configuration/input files, this feature was
+preserved.
+
+\section _example Code example
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+   // Report the failure and its location in the document to the user.
+   std::cout << "Failed to parse configuration\n"
+             << reader.getFormatedErrorMessages();
+   return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, build the new configuration document.
+// Since Json::Value has an implicit constructor for all value types, it is not
+// necessary to construct the Json::Value object explicitly:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve the original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _pdownload Download
+The sources can be downloaded from the
+SourceForge download page.
+
+The latest version of the source is available in the project's subversion repository:
+
+http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/
+
+To check out the source, see the following
+instructions.
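For a reader who wants to try the API shown in the example above end to end, the following stand-alone sketch builds a small document, writes it with Json::StyledWriter, and parses it back with Json::Reader. It is an illustrative sketch only: it assumes the umbrella header include/json/json.h imported by this patch and linkage against lib_json, and the member names used ("encoding", "plug-ins", "indent") are just sample data.

\code
#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   // Build a small configuration document in memory.
   Json::Value root;
   root["encoding"] = "UTF-8";
   root["indent"]["length"] = 3;
   root["indent"]["use_space"] = true;
   root["plug-ins"].append( "python" );   // the array is created on the first append
   root["plug-ins"].append( "c++" );

   // Serialize it with the pretty-printing writer.
   Json::StyledWriter writer;
   std::string document = writer.write( root );
   std::cout << document;

   // Parse the text back and read one member out of it.
   Json::Value parsed;
   Json::Reader reader;
   if ( !reader.parse( document, parsed ) )
   {
      std::cout << reader.getFormatedErrorMessages();
      return 1;
   }
   std::cout << "encoding: " << parsed.get( "encoding", "UTF-8" ).asString() << std::endl;
   return 0;
}
\endcode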
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in the Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.5.0/doc/readme.txt b/tags/jsoncpp/0.5.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
diff --git a/tags/jsoncpp/0.5.0/doc/roadmap.dox b/tags/jsoncpp/0.5.0/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library build
+    - Enhance help
+  - Platform portability check: (Note: was OK on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as numeric for use in preprocessor test
+  - Remove buggy experimental hash stuff
+  - Release on sourceforge download
+  \section ms_strict Add a strict mode to reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get jsonchecker failing tests to pass in strict mode
+  \section ms_separation Expose JSON reader/writer APIs that do not impose using Json::Value.
+  Some typical use-cases involve an application-specific structure to/from a JSON document.
+  - Event-based parser to allow deserializing a JSON document directly into a data structure instead of
+    using the intermediate Json::Value.
+  - "Stream"-based parser to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provides an event-based parser. Should allow pulling & skipping events for ease of use.
+    - Provides a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property name definition, avoiding allocation
+  - Static property dictionary can be provided to the JSON reader
+  - Performance scenario & benchmarking
+*/
diff --git a/tags/jsoncpp/0.5.0/doxybuild.py b/tags/jsoncpp/0.5.0/doxybuild.py
new file mode 100644
index 0000000..0a2a6c7
--- /dev/null
+++ b/tags/jsoncpp/0.5.0/doxybuild.py
@@ -0,0 +1,167 @@
+"""Script to generate doxygen documentation.
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/include/json/autolink.h b/tags/jsoncpp/0.5.0/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/config.h b/tags/jsoncpp/0.5.0/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/features.h b/tags/jsoncpp/0.5.0/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/forwards.h b/tags/jsoncpp/0.5.0/include/json/forwards.h new file mode 100644 index 0000000..d0ce830 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/forwards.h @@ -0,0 +1,39 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueAllocator; + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/json.h b/tags/jsoncpp/0.5.0/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/reader.h b/tags/jsoncpp/0.5.0/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include 
"value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/value.h b/tags/jsoncpp/0.5.0/include/json/value.h new file mode 100644 index 0000000..58bfd88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/value.h @@ -0,0 +1,1069 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
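+ *
+ * A short illustrative check (an editorial sketch, not part of the original header):
+ * \code
+ * Json::Value v( 42 );                     // stores an intValue
+ * if ( v.type() == Json::intValue )
+ *    std::cout << v.asInt() << std::endl;
+ * \endcode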
+ */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
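+ *
+ * A compact usage sketch (editorial; the member names are illustrative only):
+ * \code
+ * Json::Value settings( Json::objectValue );
+ * settings["name"] = "demo";                                  // creates the member "name"
+ * settings["ports"].append( 8080 );                           // "ports" becomes an arrayValue
+ * int firstPort = settings["ports"].get( 0u, 80 ).asInt();    // 8080, or 80 if absent
+ * Json::Value::Members members = settings.getMemberNames();   // "name", "ports"
+ * for ( unsigned int i = 0; i < members.size(); ++i )
+ *    std::cout << members[i] << std::endl;
+ * \endcode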
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + /** \brief Experimental do not use: Allocator to customize member name and string value memory management done by Value. + * + * - makeMemberName() and releaseMemberName() are called to respectively duplicate and + * free an Json::objectValue member name. + * - duplicateStringValue() and releaseStringValue() are called similarly to + * duplicate and free a Json::stringValue value. + */ + class ValueAllocator + { + public: + enum { unknown = (unsigned)-1 }; + + virtual ~ValueAllocator(); + + virtual char *makeMemberName( const char *memberName ) = 0; + virtual void releaseMemberName( char *memberName ) = 0; + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) = 0; + virtual void releaseStringValue( char *value ) = 0; + }; + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. 
+ * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
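+ *
+ * Illustrative layout (an editorial sketch of the description above; link names are arbitrary):
+ * \code
+ * buckets_[0]    --> linkA --> linkB   // linkB is reachable as buckets_[1].previous_
+ * buckets_[1]    --> linkC             // linkC is reachable as buckets_[2].previous_
+ * ...
+ * buckets_[last] --> linkY --> linkZ   // linkZ is tailLink_
+ * \endcode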
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
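+
+      // Editorial illustration (not part of the original header): with itemsPerPage == 8,
+      // the look-up described above maps itemIndex 19 to
+      //    pageIndex = 19 / 8 == 2   and   pages_[2][19 % 8] == pages_[2][3].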
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/include/json/writer.h b/tags/jsoncpp/0.5.0/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/tags/jsoncpp/0.5.0/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
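+ *
+ * A minimal usage sketch (editorial; the indentation string is arbitrary):
+ * \code
+ * Json::StyledStreamWriter writer( "   " );
+ * writer.write( std::cout, root );   // root is an already populated Json::Value
+ * \endcode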
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.5.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.5.0/makerelease.py b/tags/jsoncpp/0.5.0/makerelease.py new file mode 100644 index 0000000..b760fae --- /dev/null +++ b/tags/jsoncpp/0.5.0/makerelease.py @@ -0,0 +1,368 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' 
+ svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/scons-tools/globtool.py b/tags/jsoncpp/0.5.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/srcdist.py b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.5.0/scons-tools/substinfile.py b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.5.0/scons-tools/targz.py b/tags/jsoncpp/0.5.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.5.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
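+# gzip/tarfile could not be imported (internal_targz == 0): fall back to a no-op
+# generate() so that SCons can still load this tool without the TarGz builder.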
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
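+      // freeHead_ is the head of an intrusive singly linked list of released objects:
+      // the first sizeof(AllocatedType *) bytes of each free object hold the pointer to
+      // the next free object (see the assertion in the constructor and release() below).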
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + 
ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
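+         // each element was constructed with placement new into a raw page,
+         // so its destructor must be invoked explicitly before the pages are released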
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..1977148 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
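+ * For example, DefaultValueMapAllocator::allocateMapLink() below obtains raw storage
+ * from a BatchAllocator and zero-fills it instead of running the constructor
+ * (illustrative excerpt of that pattern):
+ * \code
+ *   ValueInternalLink *link = linksAllocator_.allocate();
+ *   memset( link, 0, sizeof(ValueInternalLink) );  // safe initialization, see note above
+ *   return link;
+ * \endcode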
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = valueAllocator()->makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..4eb2d11 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_reader.cpp @@ -0,0 +1,885 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + +static std::string codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + 
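A minimal usage sketch of the class implemented below (illustrative only: the
document string is hypothetical, and the calls shown are the parse(),
getFormatedErrorMessages() and Value accessors defined in this file and in
json_value.cpp, assuming <iostream> is available):

    Json::Value root;
    Json::Reader reader;
    // parse() returns false on failure and records per-token error messages.
    if ( !reader.parse( "{ \"encoding\" : \"UTF-8\" }", root, true ) )
        std::cerr << reader.getFormatedErrorMessages();
    else
        std::cout << root["encoding"].asString() << std::endl;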
+Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + 
break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + 
break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int 
index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..573205f --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_value.cpp @@ -0,0 +1,1718 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= 
change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +// A "safe" implementation of strdup. Allow null pointer to be passed. +// Also avoid warning on msvc80. +// +//inline char *safeStringDup( const char *czstring ) +//{ +// if ( czstring ) +// { +// const size_t length = (unsigned int)( strlen(czstring) + 1 ); +// char *newString = static_cast( malloc( length ) ); +// memcpy( newString, czstring, length ); +// return newString; +// } +// return 0; +//} +// +//inline char *safeStringDup( const std::string &str ) +//{ +// if ( !str.empty() ) +// { +// const size_t length = str.length(); +// char *newString = static_cast( malloc( length + 1 ) ); +// memcpy( newString, str.c_str(), length ); +// newString[length] = 0; +// return newString; +// } +// return 0; +//} + +ValueAllocator::~ValueAllocator() +{ +} + +class DefaultValueAllocator : public ValueAllocator +{ +public: + virtual ~DefaultValueAllocator() + { + } + + virtual char *makeMemberName( const char *memberName ) + { + return duplicateStringValue( memberName ); + } + + virtual void releaseMemberName( char *memberName ) + { + releaseStringValue( memberName ); + } + + virtual char *duplicateStringValue( const char *value, + unsigned int length = unknown ) + { + //@todo invesgate this old optimization + //if ( !value || value[0] == 0 ) + // return 0; + + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; + } + + virtual void releaseStringValue( char *value ) + { + if ( value ) + free( value ); + } +}; + +static ValueAllocator *&valueAllocator() +{ + static DefaultValueAllocator defaultAllocator; + static ValueAllocator *valueAllocator = &defaultAllocator; + return valueAllocator; +} + +static struct DummyValueAllocatorInitializer { + DummyValueAllocatorInitializer() + { + valueAllocator(); // ensure valueAllocator() statics are initialized before main(). + } +} dummyValueAllocatorInitializer; + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
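// The JSON_VALUE_USE_INTERNAL_MAP branches throughout this file select an
// alternative storage backend: when the macro is defined, arrays and objects
// use the ValueInternalArray / ValueInternalMap types pulled in from the .inl
// files below; otherwise both use the ObjectValues map keyed by CZString
// (see the #ifndef branches further down).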
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + valueAllocator()->releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = valueAllocator()->duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? valueAllocator()->makeMemberName( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) 
+ : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + valueAllocator()->releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete 
value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
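   // resize() only applies to null and array values; a null value is first
   // turned into an empty array. Without JSON_VALUE_USE_INTERNAL_MAP, growing
   // just touches index newSize-1 so that operator[] default-constructs it, and
   // shrinking erases every index from newSize up to the old size; with the
   // internal-map backend the call is forwarded to ValueInternalArray::resize().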
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) 
const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..cdf4188 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/json_writer.cpp @@ -0,0 +1,829 @@ +#include +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} +static void uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid </ + // sequence. + // Should add a flag to allow this compatibility mode and prevent this + // sequence from occurring. + default: + if ( isControlCharacter( *c ) ) + { + std::ostringstream oss; + oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
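+   // Typical use of this class (illustrative sketch only; the Value named
+   // "root" is a hypothetical example, not part of this file):
+   //    Json::Value root;
+   //    root["name"] = "json-cpp";
+   //    Json::StyledStreamWriter writer( "   " );
+   //    writer.write( std::cout, root );
+   //    std::cout << root;   // operator<< below also uses StyledStreamWriter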
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.5.0/src/lib_json/sconscript b/tags/jsoncpp/0.5.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equal. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two string values are equal. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overridden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include <json/json.h> +#include "jsontest.h" + + +// TODO: +// - boolean values return true from isIntegral(). They should not. +// - unsigned integers within the signed integer range are not considered valid integers. The range should be checked.
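+// The fixtures below use the macros from jsontest.h. The general pattern is
+// (illustrative sketch only; "example" is a hypothetical test name):
+//    JSONTEST_FIXTURE( ValueTest, example )
+//    {
+//       JSONTEST_ASSERT_EQUAL( 1u, array1_.size() ) << "unexpected array size";
+//       JSONTEST_ASSERT_PRED( checkMemberCount( array1_, 1 ) );
+//    }
+//    JSONTEST_REGISTER_FIXTURE( runner, ValueTest, example );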
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.5.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.5.0/test/cleantests.py b/tags/jsoncpp/0.5.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.expected b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_01.json b/tags/jsoncpp/0.5.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.expected b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_02.json b/tags/jsoncpp/0.5.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.expected b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_03.json b/tags/jsoncpp/0.5.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.expected b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_04.json b/tags/jsoncpp/0.5.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.expected b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 
+.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_05.json b/tags/jsoncpp/0.5.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.expected b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.5.0/test/data/test_array_06.json b/tags/jsoncpp/0.5.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_01.json b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_02.json b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_03.json b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_04.json b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_05.json b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_06.json b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_07.json b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_08.json b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_basic_09.json b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.5.0/test/data/test_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + 
"test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_complex_01.json b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_01.json b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_02.json b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_03.json b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_04.json b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected new file mode 
100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_integer_05.json b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.expected b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 
+.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 
+.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 
+.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 
+.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 
+.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 
+.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 
+.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 
+.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 
+.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.5.0/test/data/test_large_01.json b/tags/jsoncpp/0.5.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.expected b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_01.json b/tags/jsoncpp/0.5.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.expected b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_02.json b/tags/jsoncpp/0.5.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.expected 
b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_03.json b/tags/jsoncpp/0.5.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.expected b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.5.0/test/data/test_object_04.json b/tags/jsoncpp/0.5.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.expected b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_01.json b/tags/jsoncpp/0.5.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.expected b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_02.json b/tags/jsoncpp/0.5.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.expected b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_03.json b/tags/jsoncpp/0.5.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.expected b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_04.json b/tags/jsoncpp/0.5.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.expected b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_05.json b/tags/jsoncpp/0.5.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.expected b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_06.json b/tags/jsoncpp/0.5.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.expected b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.expected @@ -0,0 
+1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.5.0/test/data/test_real_07.json b/tags/jsoncpp/0.5.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_02.json @@ -0,0 +1 @@ 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- 
/dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/generate_expected.py b/tags/jsoncpp/0.5.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail20.json @@ -0,0 +1 
@@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git 
a/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. 
James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? 
+ file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.5.0/test/runjsontests.py b/tags/jsoncpp/0.5.0/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) 
) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' ) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/test/rununittests.py b/tags/jsoncpp/0.5.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.5.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + 
parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.5.0/version b/tags/jsoncpp/0.5.0/version new file mode 100644 index 0000000..79a2734 --- /dev/null +++ b/tags/jsoncpp/0.5.0/version @@ -0,0 +1 @@ +0.5.0 \ No newline at end of file From 85cb278f1729919fcda7a1f497e648987c904132 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 12 Mar 2010 07:46:20 +0000 Subject: [PATCH 139/268] Released 0.5.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@139 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version index 79a2734..7defe1e 100644 --- a/trunk/jsoncpp/version +++ b/trunk/jsoncpp/version @@ -1 +1 @@ -0.5.0 \ No newline at end of file +0.6.0-dev \ No newline at end of file From b7979f8c72cd3e78094d9b6a4d25c42e02bf6e61 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 12 Mar 2010 10:17:46 +0000 Subject: [PATCH 140/268] - fixed project links section name git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@140 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/jsoncpp.dox | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index abaac6c..02b18fb 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -82,7 +82,7 @@ std::cin >> root["subtree"]; std::cout << root; \endcode -\section _plinks Build instructions +\section _pbuild Build instructions The build instructions are located in the file README.txt in the top-directory of the project. From 4ecd0393f70a43fa87d6c0db2b7455f6c44cf82c Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 13 Mar 2010 07:55:46 +0000 Subject: [PATCH 141/268] - add LD_LIBRARY_PATH to propagated environment variables as it is required for some compiler installations. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@141 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/SConstruct | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index 0499db9..d652887 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -57,8 +57,9 @@ def make_environ_vars(): """Returns a dictionnary with environment variable to use when compiling.""" # PATH is required to find the compiler # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler vars = {} - for name in ('PATH', 'TEMP', 'TMP'): + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): if name in os.environ: vars[name] = os.environ[name] return vars From 86d72eeffb7c76d0aa790417de5777c18f1b4f12 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 13 Mar 2010 07:59:07 +0000 Subject: [PATCH 142/268] - better execution examples git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@142 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/makerelease.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index b760fae..8131cf4 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -3,7 +3,10 @@ Requires Python 2.6 Example of invocation (use to test the script): -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev +python makerelease.py --platform=msvc6,msvc71,msvc80,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.5.0 test-0.6.0-dev Example of invocation when doing a release: python makerelease.py 0.5.0 0.6.0-dev From d4528fce28a37ed3a6e5569f0eb8daf437901e47 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 13 Mar 2010 10:59:50 +0000 Subject: [PATCH 143/268] - added support for compilation using Microsoft Visual Studio 2008 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@143 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 7 ++++++- trunk/jsoncpp/SConstruct | 14 +++++++++++++- trunk/jsoncpp/makerelease.py | 2 +- 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index ed7ef8f..424e42d 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -35,9 +35,14 @@ where PLTFRM may be one of: msvc70 Microsoft Visual Studio 2002 msvc71 Microsoft Visual Studio 2003 msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. -adding platform is fairly simple. You need to change the Sconstruct file +Adding platform is fairly simple. You need to change the Sconstruct file to do so. 
and TARGET may be: diff --git a/trunk/jsoncpp/SConstruct b/trunk/jsoncpp/SConstruct index d652887..23225cb 100644 --- a/trunk/jsoncpp/SConstruct +++ b/trunk/jsoncpp/SConstruct @@ -18,7 +18,7 @@ options = Variables() options.Add( EnumVariable('platform', 'Platform (compiler/stl) used to build the project', 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 linux-gcc'.split(), + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), ignorecase=2) ) try: @@ -102,6 +102,18 @@ elif platform == 'msvc80': for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: env.Tool( tool ) env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. + if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' elif platform == 'mingw': env.Tool( 'mingw' ) env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index 8131cf4..9b59180 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -3,7 +3,7 @@ Requires Python 2.6 Example of invocation (use to test the script): -python makerelease.py --platform=msvc6,msvc71,msvc80,mingw -ublep 0.6.0 0.7.0-dev +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev When testing this script: python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.5.0 test-0.6.0-dev From e77580a0097e8727c8f05064e35c714a695e39eb Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 13 Mar 2010 11:14:49 +0000 Subject: [PATCH 144/268] Added NEWS.txt that provides a synopsis of the change since the last version. Integrated NEWS.txt in documentation. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@144 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 15 +++++++++++++++ trunk/jsoncpp/doc/jsoncpp.dox | 7 +++++++ trunk/jsoncpp/doxybuild.py | 1 + 3 files changed, 23 insertions(+) create mode 100644 trunk/jsoncpp/NEWS.txt diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt new file mode 100644 index 0000000..184ad93 --- /dev/null +++ b/trunk/jsoncpp/NEWS.txt @@ -0,0 +1,15 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now propagated to the build + environment as this is required for some compiler installation. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to setup the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 02b18fb..1cde6ff 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -100,6 +100,13 @@ http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ To checkout the source, see the following instructions. +\section _news What's New? +The description of latest changes can be found in +NEWS.txt in the top-directory of the project. 
+ +Permanent link to the latest revision of the file in subversion: +latest NEWS.txt + \section _plinks Project links - json-cpp home - json-cpp sourceforge project diff --git a/trunk/jsoncpp/doxybuild.py b/trunk/jsoncpp/doxybuild.py index 0a2a6c7..0a31aae 100644 --- a/trunk/jsoncpp/doxybuild.py +++ b/trunk/jsoncpp/doxybuild.py @@ -127,6 +127,7 @@ def yesno( bool ): tarball_sources = [ output_dir, 'README.txt', + 'NEWS.txt', 'version' ] tarball_basedir = os.path.join( output_dir, html_output_dirname ) From 091d81a9bd475675724e6be22921535a2e24f746 Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 13 Mar 2010 12:24:38 +0000 Subject: [PATCH 145/268] - extracted some utility functions out-of reader and parser. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@145 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 37 +--------- trunk/jsoncpp/src/lib_json/json_tool.h | 78 ++++++++++++++++++++++ trunk/jsoncpp/src/lib_json/json_writer.cpp | 19 +----- 3 files changed, 81 insertions(+), 53 deletions(-) create mode 100644 trunk/jsoncpp/src/lib_json/json_tool.h diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 4eb2d11..da8e83b 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -1,5 +1,6 @@ #include #include +#include "json_tool.h" #include #include #include @@ -66,42 +67,6 @@ containsNewLine( Reader::Location begin, return false; } -static std::string codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - // Class Reader // ////////////////////////////////////////////////////////////////// diff --git a/trunk/jsoncpp/src/lib_json/json_tool.h b/trunk/jsoncpp/src/lib_json/json_tool.h new file mode 100644 index 0000000..ca4ea4f --- /dev/null +++ b/trunk/jsoncpp/src/lib_json/json_tool.h @@ -0,0 +1,78 @@ +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... + * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. 
+static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. Must have at least 10 chars free. + */ +static inline void +uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index cdf4188..3b926e6 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -1,4 +1,5 @@ #include +#include "json_tool.h" #include #include #include @@ -13,11 +14,6 @@ namespace Json { -static bool isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - static bool containsControlCharacter( const char* str ) { while ( *str ) @@ -27,17 +23,6 @@ static bool containsControlCharacter( const char* str ) } return false; } -static void uintToString( unsigned int value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = (value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} std::string valueToString( Int value ) { @@ -116,7 +101,7 @@ std::string valueToQuotedString( const char *value ) // We have to walk value and escape any special characters. // Appending to std::string is not efficient, but this should be rare. // (Note: forward slashes are *not* rare, but I am not escaping them.) - unsigned maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL std::string result; result.reserve(maxsize); // to avoid lots of mallocs result += "\""; From e4d5d867396ab24b7afec6123e25ac22eef0d80d Mon Sep 17 00:00:00 2001 From: blep Date: Sat, 13 Mar 2010 13:10:27 +0000 Subject: [PATCH 146/268] Removed experimental ValueAllocator, it caused static initialization/destruction order issues (bug #2934500). The DefaultValueAllocator has been inlined in code. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@146 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 6 + trunk/jsoncpp/include/json/forwards.h | 1 - trunk/jsoncpp/include/json/value.h | 19 --- .../jsoncpp/src/lib_json/json_internalmap.inl | 2 +- trunk/jsoncpp/src/lib_json/json_value.cpp | 130 ++++++------------ 5 files changed, 46 insertions(+), 112 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 184ad93..b5e8cf6 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -12,4 +12,10 @@ Notes: you need to setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt in start menu) before running scons. +* Value + + - Removed experimental ValueAllocator, it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index d0ce830..815075e 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -26,7 +26,6 @@ namespace Json { class ValueIterator; class ValueConstIterator; #ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueAllocator; class ValueMapAllocator; class ValueInternalLink; class ValueInternalArray; diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 58bfd88..5d1bc81 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -513,26 +513,7 @@ namespace Json { Args args_; }; - /** \brief Experimental do not use: Allocator to customize member name and string value memory management done by Value. - * - * - makeMemberName() and releaseMemberName() are called to respectively duplicate and - * free an Json::objectValue member name. - * - duplicateStringValue() and releaseStringValue() are called similarly to - * duplicate and free a Json::stringValue value. - */ - class ValueAllocator - { - public: - enum { unknown = (unsigned)-1 }; - - virtual ~ValueAllocator(); - virtual char *makeMemberName( const char *memberName ) = 0; - virtual void releaseMemberName( char *memberName ) = 0; - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) = 0; - virtual void releaseStringValue( char *value ) = 0; - }; #ifdef JSON_VALUE_USE_INTERNAL_MAP /** \brief Allocator to customize Value internal map. diff --git a/trunk/jsoncpp/src/lib_json/json_internalmap.inl b/trunk/jsoncpp/src/lib_json/json_internalmap.inl index 1977148..bade5d5 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalmap.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalmap.inl @@ -415,7 +415,7 @@ ValueInternalMap::setNewItem( const char *key, ValueInternalLink *link, BucketIndex index ) { - char *duplicatedKey = valueAllocator()->makeMemberName( key ); + char *duplicatedKey = makeMemberName( key ); ++itemCount_; link->keys_[index] = duplicatedKey; link->items_[index].setItemUsed(); diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 573205f..1ccf70f 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -24,91 +24,39 @@ const Int Value::minInt = Int( ~(UInt(-1)/2) ); const Int Value::maxInt = Int( UInt(-1)/2 ); const UInt Value::maxUInt = UInt(-1); -// A "safe" implementation of strdup. Allow null pointer to be passed. -// Also avoid warning on msvc80. 
-// -//inline char *safeStringDup( const char *czstring ) -//{ -// if ( czstring ) -// { -// const size_t length = (unsigned int)( strlen(czstring) + 1 ); -// char *newString = static_cast( malloc( length ) ); -// memcpy( newString, czstring, length ); -// return newString; -// } -// return 0; -//} -// -//inline char *safeStringDup( const std::string &str ) -//{ -// if ( !str.empty() ) -// { -// const size_t length = str.length(); -// char *newString = static_cast( malloc( length + 1 ) ); -// memcpy( newString, str.c_str(), length ); -// newString[length] = 0; -// return newString; -// } -// return 0; -//} +/// Unknown size marker +enum { unknown = (unsigned)-1 }; -ValueAllocator::~ValueAllocator() -{ -} -class DefaultValueAllocator : public ValueAllocator +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) { -public: - virtual ~DefaultValueAllocator() - { - } - - virtual char *makeMemberName( const char *memberName ) - { - return duplicateStringValue( memberName ); - } - - virtual void releaseMemberName( char *memberName ) - { - releaseStringValue( memberName ); - } - - virtual char *duplicateStringValue( const char *value, - unsigned int length = unknown ) - { - //@todo invesgate this old optimization - //if ( !value || value[0] == 0 ) - // return 0; - - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; - } + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} - virtual void releaseStringValue( char *value ) - { - if ( value ) - free( value ); - } -}; -static ValueAllocator *&valueAllocator() +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) { - static DefaultValueAllocator defaultAllocator; - static ValueAllocator *valueAllocator = &defaultAllocator; - return valueAllocator; + if ( value ) + free( value ); } -static struct DummyValueAllocatorInitializer { - DummyValueAllocatorInitializer() - { - valueAllocator(); // ensure valueAllocator() statics are initialized before main(). - } -} dummyValueAllocatorInitializer; - // ////////////////////////////////////////////////////////////////// @@ -143,7 +91,7 @@ Value::CommentInfo::CommentInfo() Value::CommentInfo::~CommentInfo() { if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); + releaseStringValue( comment_ ); } @@ -151,11 +99,11 @@ void Value::CommentInfo::setComment( const char *text ) { if ( comment_ ) - valueAllocator()->releaseStringValue( comment_ ); + releaseStringValue( comment_ ); JSON_ASSERT( text ); JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); // It seems that /**/ style comments are acceptable as well. 
- comment_ = valueAllocator()->duplicateStringValue( text ); + comment_ = duplicateStringValue( text ); } @@ -178,7 +126,7 @@ Value::CZString::CZString( int index ) } Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? valueAllocator()->makeMemberName(cstr) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) : cstr ) , index_( allocate ) { @@ -186,7 +134,7 @@ Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) Value::CZString::CZString( const CZString &other ) : cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? valueAllocator()->makeMemberName( other.cstr_ ) + ? duplicateStringValue( other.cstr_ ) : other.cstr_ ) , index_( other.cstr_ ? (other.index_ == noDuplication ? noDuplication : duplicate) : other.index_ ) @@ -196,7 +144,7 @@ Value::CZString::CZString( const CZString &other ) Value::CZString::~CZString() { if ( cstr_ && index_ == duplicate ) - valueAllocator()->releaseMemberName( const_cast( cstr_ ) ); + releaseStringValue( const_cast( cstr_ ) ); } void @@ -348,7 +296,7 @@ Value::Value( const char *value ) , itemIsUsed_( 0 ) #endif { - value_.string_ = valueAllocator()->duplicateStringValue( value ); + value_.string_ = duplicateStringValue( value ); } @@ -361,8 +309,8 @@ Value::Value( const char *beginValue, , itemIsUsed_( 0 ) #endif { - value_.string_ = valueAllocator()->duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); + value_.string_ = duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); } @@ -374,8 +322,8 @@ Value::Value( const std::string &value ) , itemIsUsed_( 0 ) #endif { - value_.string_ = valueAllocator()->duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); } @@ -400,7 +348,7 @@ Value::Value( const CppTL::ConstString &value ) , itemIsUsed_( 0 ) #endif { - value_.string_ = valueAllocator()->duplicateStringValue( value, value.length() ); + value_.string_ = duplicateStringValue( value, value.length() ); } # endif @@ -434,7 +382,7 @@ Value::Value( const Value &other ) case stringValue: if ( other.value_.string_ ) { - value_.string_ = valueAllocator()->duplicateStringValue( other.value_.string_ ); + value_.string_ = duplicateStringValue( other.value_.string_ ); allocated_ = true; } else @@ -481,7 +429,7 @@ Value::~Value() break; case stringValue: if ( allocated_ ) - valueAllocator()->releaseStringValue( value_.string_ ); + releaseStringValue( value_.string_ ); break; #ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: From 3320755286b6290b6a1e33ffa1c609442c823c79 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Tue, 23 Mar 2010 00:10:35 +0000 Subject: [PATCH 147/268] Just testing svn commit. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@147 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- branches/jsoncpp/cdunn-146/AUTHORS | 1 + branches/jsoncpp/cdunn-146/LICENSE | 1 + branches/jsoncpp/cdunn-146/NEWS.txt | 21 + branches/jsoncpp/cdunn-146/README.txt | 122 + branches/jsoncpp/cdunn-146/SConstruct | 248 ++ .../jsoncpp/cdunn-146/devtools/__init__.py | 1 + .../jsoncpp/cdunn-146/devtools/antglob.py | 201 ++ branches/jsoncpp/cdunn-146/devtools/fixeol.py | 63 + .../jsoncpp/cdunn-146/devtools/tarball.py | 53 + branches/jsoncpp/cdunn-146/doc/doxyfile.in | 1534 ++++++++++++ branches/jsoncpp/cdunn-146/doc/footer.html | 23 + branches/jsoncpp/cdunn-146/doc/header.html | 24 + branches/jsoncpp/cdunn-146/doc/jsoncpp.dox | 123 + branches/jsoncpp/cdunn-146/doc/readme.txt | 1 + branches/jsoncpp/cdunn-146/doc/roadmap.dox | 32 + branches/jsoncpp/cdunn-146/doxybuild.py | 168 ++ .../jsoncpp/cdunn-146/include/json/autolink.h | 19 + .../jsoncpp/cdunn-146/include/json/config.h | 43 + .../jsoncpp/cdunn-146/include/json/features.h | 42 + .../jsoncpp/cdunn-146/include/json/forwards.h | 38 + .../jsoncpp/cdunn-146/include/json/json.h | 10 + .../jsoncpp/cdunn-146/include/json/reader.h | 196 ++ .../jsoncpp/cdunn-146/include/json/value.h | 1050 ++++++++ .../jsoncpp/cdunn-146/include/json/writer.h | 174 ++ .../cdunn-146/makefiles/vs71/jsoncpp.sln | 46 + .../cdunn-146/makefiles/vs71/jsontest.vcproj | 119 + .../cdunn-146/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + branches/jsoncpp/cdunn-146/makerelease.py | 371 +++ .../jsoncpp/cdunn-146/scons-tools/globtool.py | 53 + .../jsoncpp/cdunn-146/scons-tools/srcdist.py | 179 ++ .../cdunn-146/scons-tools/substinfile.py | 79 + .../jsoncpp/cdunn-146/scons-tools/targz.py | 82 + .../cdunn-146/src/jsontestrunner/main.cpp | 233 ++ .../cdunn-146/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 125 + .../src/lib_json/json_internalarray.inl | 448 ++++ .../src/lib_json/json_internalmap.inl | 607 +++++ .../cdunn-146/src/lib_json/json_reader.cpp | 850 +++++++ .../cdunn-146/src/lib_json/json_tool.h | 78 + .../cdunn-146/src/lib_json/json_value.cpp | 1666 +++++++++++++ .../src/lib_json/json_valueiterator.inl | 292 +++ .../cdunn-146/src/lib_json/json_writer.cpp | 814 +++++++ .../jsoncpp/cdunn-146/src/lib_json/sconscript | 8 + .../cdunn-146/src/test_lib_json/jsontest.cpp | 603 +++++ .../cdunn-146/src/test_lib_json/jsontest.h | 254 ++ .../cdunn-146/src/test_lib_json/main.cpp | 244 ++ .../cdunn-146/src/test_lib_json/sconscript | 10 + branches/jsoncpp/cdunn-146/test/cleantests.py | 10 + .../test/data/test_array_01.expected | 1 + .../cdunn-146/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../cdunn-146/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../cdunn-146/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + .../cdunn-146/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../cdunn-146/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../cdunn-146/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../cdunn-146/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../cdunn-146/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../cdunn-146/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../cdunn-146/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected 
| 2 + .../cdunn-146/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../cdunn-146/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../cdunn-146/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../cdunn-146/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../cdunn-146/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../cdunn-146/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../cdunn-146/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../cdunn-146/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../cdunn-146/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../cdunn-146/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../cdunn-146/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../cdunn-146/test/data/test_integer_05.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../cdunn-146/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../cdunn-146/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../cdunn-146/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../cdunn-146/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../cdunn-146/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../cdunn-146/test/data/test_real_01.expected | 2 + .../cdunn-146/test/data/test_real_01.json | 3 + .../cdunn-146/test/data/test_real_02.expected | 2 + .../cdunn-146/test/data/test_real_02.json | 3 + .../cdunn-146/test/data/test_real_03.expected | 2 + .../cdunn-146/test/data/test_real_03.json | 3 + .../cdunn-146/test/data/test_real_04.expected | 2 + .../cdunn-146/test/data/test_real_04.json | 3 + .../cdunn-146/test/data/test_real_05.expected | 3 + .../cdunn-146/test/data/test_real_05.json | 3 + .../cdunn-146/test/data/test_real_06.expected | 3 + .../cdunn-146/test/data/test_real_06.json | 3 + .../cdunn-146/test/data/test_real_07.expected | 3 + .../cdunn-146/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../cdunn-146/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../cdunn-146/test/data/test_string_02.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../cdunn-146/test/generate_expected.py | 11 + .../cdunn-146/test/jsonchecker/fail1.json | 1 + .../cdunn-146/test/jsonchecker/fail10.json | 1 + .../cdunn-146/test/jsonchecker/fail11.json | 1 + .../cdunn-146/test/jsonchecker/fail12.json | 1 + .../cdunn-146/test/jsonchecker/fail13.json | 1 + .../cdunn-146/test/jsonchecker/fail14.json | 1 + .../cdunn-146/test/jsonchecker/fail15.json | 1 + .../cdunn-146/test/jsonchecker/fail16.json | 1 + .../cdunn-146/test/jsonchecker/fail17.json | 
1 + .../cdunn-146/test/jsonchecker/fail18.json | 1 + .../cdunn-146/test/jsonchecker/fail19.json | 1 + .../cdunn-146/test/jsonchecker/fail2.json | 1 + .../cdunn-146/test/jsonchecker/fail20.json | 1 + .../cdunn-146/test/jsonchecker/fail21.json | 1 + .../cdunn-146/test/jsonchecker/fail22.json | 1 + .../cdunn-146/test/jsonchecker/fail23.json | 1 + .../cdunn-146/test/jsonchecker/fail24.json | 1 + .../cdunn-146/test/jsonchecker/fail25.json | 1 + .../cdunn-146/test/jsonchecker/fail26.json | 1 + .../cdunn-146/test/jsonchecker/fail27.json | 2 + .../cdunn-146/test/jsonchecker/fail28.json | 2 + .../cdunn-146/test/jsonchecker/fail29.json | 1 + .../cdunn-146/test/jsonchecker/fail3.json | 1 + .../cdunn-146/test/jsonchecker/fail30.json | 1 + .../cdunn-146/test/jsonchecker/fail31.json | 1 + .../cdunn-146/test/jsonchecker/fail32.json | 1 + .../cdunn-146/test/jsonchecker/fail33.json | 1 + .../cdunn-146/test/jsonchecker/fail4.json | 1 + .../cdunn-146/test/jsonchecker/fail5.json | 1 + .../cdunn-146/test/jsonchecker/fail6.json | 1 + .../cdunn-146/test/jsonchecker/fail7.json | 1 + .../cdunn-146/test/jsonchecker/fail8.json | 1 + .../cdunn-146/test/jsonchecker/fail9.json | 1 + .../cdunn-146/test/jsonchecker/pass1.json | 58 + .../cdunn-146/test/jsonchecker/pass2.json | 1 + .../cdunn-146/test/jsonchecker/pass3.json | 6 + .../cdunn-146/test/jsonchecker/readme.txt | 3 + .../cdunn-146/test/pyjsontestrunner.py | 64 + .../jsoncpp/cdunn-146/test/runjsontests.py | 134 ++ .../jsoncpp/cdunn-146/test/rununittests.py | 73 + branches/jsoncpp/cdunn-146/version | 1 + 175 files changed, 14547 insertions(+) create mode 100644 branches/jsoncpp/cdunn-146/AUTHORS create mode 100644 branches/jsoncpp/cdunn-146/LICENSE create mode 100644 branches/jsoncpp/cdunn-146/NEWS.txt create mode 100644 branches/jsoncpp/cdunn-146/README.txt create mode 100644 branches/jsoncpp/cdunn-146/SConstruct create mode 100644 branches/jsoncpp/cdunn-146/devtools/__init__.py create mode 100644 branches/jsoncpp/cdunn-146/devtools/antglob.py create mode 100644 branches/jsoncpp/cdunn-146/devtools/fixeol.py create mode 100644 branches/jsoncpp/cdunn-146/devtools/tarball.py create mode 100644 branches/jsoncpp/cdunn-146/doc/doxyfile.in create mode 100644 branches/jsoncpp/cdunn-146/doc/footer.html create mode 100644 branches/jsoncpp/cdunn-146/doc/header.html create mode 100644 branches/jsoncpp/cdunn-146/doc/jsoncpp.dox create mode 100644 branches/jsoncpp/cdunn-146/doc/readme.txt create mode 100644 branches/jsoncpp/cdunn-146/doc/roadmap.dox create mode 100644 branches/jsoncpp/cdunn-146/doxybuild.py create mode 100644 branches/jsoncpp/cdunn-146/include/json/autolink.h create mode 100644 branches/jsoncpp/cdunn-146/include/json/config.h create mode 100644 branches/jsoncpp/cdunn-146/include/json/features.h create mode 100644 branches/jsoncpp/cdunn-146/include/json/forwards.h create mode 100644 branches/jsoncpp/cdunn-146/include/json/json.h create mode 100644 branches/jsoncpp/cdunn-146/include/json/reader.h create mode 100644 branches/jsoncpp/cdunn-146/include/json/value.h create mode 100644 branches/jsoncpp/cdunn-146/include/json/writer.h create mode 100644 branches/jsoncpp/cdunn-146/makefiles/vs71/jsoncpp.sln create mode 100644 branches/jsoncpp/cdunn-146/makefiles/vs71/jsontest.vcproj create mode 100644 branches/jsoncpp/cdunn-146/makefiles/vs71/lib_json.vcproj create mode 100644 branches/jsoncpp/cdunn-146/makefiles/vs71/test_lib_json.vcproj create mode 100644 branches/jsoncpp/cdunn-146/makerelease.py create mode 100644 branches/jsoncpp/cdunn-146/scons-tools/globtool.py 
create mode 100644 branches/jsoncpp/cdunn-146/scons-tools/srcdist.py create mode 100644 branches/jsoncpp/cdunn-146/scons-tools/substinfile.py create mode 100644 branches/jsoncpp/cdunn-146/scons-tools/targz.py create mode 100644 branches/jsoncpp/cdunn-146/src/jsontestrunner/main.cpp create mode 100644 branches/jsoncpp/cdunn-146/src/jsontestrunner/sconscript create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_batchallocator.h create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_internalarray.inl create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_internalmap.inl create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_reader.cpp create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_tool.h create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_value.cpp create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_valueiterator.inl create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/json_writer.cpp create mode 100644 branches/jsoncpp/cdunn-146/src/lib_json/sconscript create mode 100644 branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.cpp create mode 100644 branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.h create mode 100644 branches/jsoncpp/cdunn-146/src/test_lib_json/main.cpp create mode 100644 branches/jsoncpp/cdunn-146/src/test_lib_json/sconscript create mode 100644 branches/jsoncpp/cdunn-146/test/cleantests.py create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_02.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_02.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_03.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_03.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_04.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_04.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_05.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_05.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_06.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_array_06.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_02.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_02.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_03.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_03.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_04.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_04.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_05.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_05.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_06.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_06.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_07.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_07.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_08.expected create mode 100644 
branches/jsoncpp/cdunn-146/test/data/test_basic_08.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_09.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_basic_09.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_comment_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_comment_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_complex_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_complex_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_02.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_02.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_03.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_03.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_04.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_04.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_05.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_integer_05.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_large_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_large_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_02.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_02.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_03.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_03.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_04.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_object_04.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_02.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_02.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_03.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_03.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_04.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_04.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_05.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_05.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_06.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_06.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_07.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_real_07.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_01.json create mode 100644 
branches/jsoncpp/cdunn-146/test/data/test_string_02.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_02.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.json create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.expected create mode 100644 branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.json create mode 100644 branches/jsoncpp/cdunn-146/test/generate_expected.py create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail1.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail10.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail11.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail12.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail13.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail14.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail15.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail16.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail17.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail18.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail19.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail2.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail20.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail21.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail22.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail23.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail24.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail25.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail26.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail27.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail28.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail29.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail3.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail30.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail31.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail32.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail33.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail4.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail5.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail6.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail7.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail8.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/fail9.json create mode 100644 
branches/jsoncpp/cdunn-146/test/jsonchecker/pass1.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/pass2.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/pass3.json create mode 100644 branches/jsoncpp/cdunn-146/test/jsonchecker/readme.txt create mode 100644 branches/jsoncpp/cdunn-146/test/pyjsontestrunner.py create mode 100644 branches/jsoncpp/cdunn-146/test/runjsontests.py create mode 100644 branches/jsoncpp/cdunn-146/test/rununittests.py create mode 100644 branches/jsoncpp/cdunn-146/version diff --git a/branches/jsoncpp/cdunn-146/AUTHORS b/branches/jsoncpp/cdunn-146/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/branches/jsoncpp/cdunn-146/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/branches/jsoncpp/cdunn-146/LICENSE b/branches/jsoncpp/cdunn-146/LICENSE new file mode 100644 index 0000000..d20fb29 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/LICENSE @@ -0,0 +1 @@ +The json-cpp library and this documentation are in Public Domain. diff --git a/branches/jsoncpp/cdunn-146/NEWS.txt b/branches/jsoncpp/cdunn-146/NEWS.txt new file mode 100644 index 0000000..b5e8cf6 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/NEWS.txt @@ -0,0 +1,21 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now propagated to the build + environment as this is required for some compiler installation. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to setup the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + +* Value + + - Removed experimental ValueAllocator, it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + + diff --git a/branches/jsoncpp/cdunn-146/README.txt b/branches/jsoncpp/cdunn-146/README.txt new file mode 100644 index 0000000..424e42d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/README.txt @@ -0,0 +1,122 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp is a simple API to manipulate JSON value, handle serialization +and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. 
+ +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. 
diff --git a/branches/jsoncpp/cdunn-146/SConstruct b/branches/jsoncpp/cdunn-146/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/branches/jsoncpp/cdunn-146/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. + if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/branches/jsoncpp/cdunn-146/devtools/__init__.py b/branches/jsoncpp/cdunn-146/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/branches/jsoncpp/cdunn-146/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/devtools/antglob.py b/branches/jsoncpp/cdunn-146/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/branches/jsoncpp/cdunn-146/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/branches/jsoncpp/cdunn-146/devtools/fixeol.py b/branches/jsoncpp/cdunn-146/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript 
**/wscript_build', +## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/branches/jsoncpp/cdunn-146/devtools/tarball.py b/branches/jsoncpp/cdunn-146/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! + tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/branches/jsoncpp/cdunn-146/doc/doxyfile.in b/branches/jsoncpp/cdunn-146/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] 
+# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. 
+# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. 
+ +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
+# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. 
+ +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. 
+ +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. 
+# This will remove the Namespaces entry from the Quick Index
+# and from the Folder Tree View (if specified). The default is YES.
+
+SHOW_NAMESPACES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by
+# doxygen. The layout file controls the global structure of the generated output files
+# in an output format independent way. To create the layout file that represents
+# doxygen's defaults, run doxygen with the -l option. You can optionally specify a
+# file name after the option, if omitted DoxygenLayout.xml will be used as the name
+# of the layout file.
+
+LAYOUT_FILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some
+# parameters in a documented function, or documenting parameters that
+# don't exist or using markup commands wrongly.
+
+WARN_IF_DOC_ERROR = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for
+# functions that are documented, but have no documentation for their parameters
+# or return value. If set to NO (the default) doxygen will only warn about
+# wrong or incomplete parameter documentation, but not about the absence of
+# documentation.
+
+WARN_NO_PARAMDOC = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that
+# doxygen can produce. The string should contain the $file, $line, and $text
+# tags, which will be replaced by the file and line number from which the
+# warning originated and the warning text. Optionally the format may contain
+# $version, which will be replaced by the version of the file (if it could
+# be obtained via FILE_VERSION_FILTER)
+
+WARN_FORMAT = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning
+# and error messages should be written. If left blank the output is written
+# to stderr.
+
+WARN_LOGFILE = %WARNING_LOG_PATH%
+
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag can be used to specify the files and/or directories that contain
+# documented source files. You may enter file names like "myfile.cpp" or
+# directories like "/usr/src/myproject". Separate the files or directories
+# with spaces.
+
+INPUT = ../include ../src/lib_json .
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
+# also the default input encoding. Doxygen uses libiconv (or the iconv built
+# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
+# the list of possible encodings.
+
+INPUT_ENCODING = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank the following patterns are tested:
+# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
+# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90
+
+FILE_PATTERNS = *.h \
+ *.cpp \
+ *.inl \
+ *.dox
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories
+# should be searched for input files as well. Possible values are YES and NO.
+# If left blank NO is used.
+
+RECURSIVE = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should
+# be excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+
+EXCLUDE =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix filesystem feature) are excluded
+# from the input.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories. Note that the wildcards are matched
+# against the file with absolute path, so to exclude all test directories
+# for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or
+# directories that contain example code fragments that are included (see
+# the \include command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank all files are included.
+
+EXAMPLE_PATTERNS = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude
+# commands irrespective of the value of the RECURSIVE tag.
+# Possible values are YES and NO. If left blank NO is used.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be
+# ignored.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis.
+# Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match.
+# The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER
+# is applied to all files.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+
+SOURCE_BROWSER = YES
+
+# Setting the INLINE_SOURCES tag to YES will include the body
+# of functions and classes directly in the documentation.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
+# doxygen to hide any special comment blocks from generated source code
+# fragments. Normal C and C++ comments will always remain visible.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES
+# then for each documented function all documented
+# functions referencing it will be listed.
+
+REFERENCED_BY_RELATION = YES
+
+# If the REFERENCES_RELATION tag is set to YES
+# then for each documented function all documented entities
+# called/used by that function will be listed.
+
+REFERENCES_RELATION = YES
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
+# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
+# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
+# link to the source code.
+# Otherwise they will link to the documentation.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code
+# will point to the HTML generated by the htags(1) tool instead of doxygen's
+# built-in source browser. The htags tool is part of GNU's global source
+# tagging system (see http://www.gnu.org/software/global/global.html). You
+# will need version 4.8.6 or higher.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
+# will generate a verbatim copy of the header file for each class for
+# which an include is specified. Set to NO to disable this.
+
+VERBATIM_HEADERS = YES
+
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
+# of all compounds will be generated.
Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. + +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. 
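+# A minimal sketch of that workflow (hypothetical, assuming GENERATE_DOCSET
+# were switched to YES, and with <html-output-dir> standing for the resolved
+# OUTPUT_DIRECTORY/HTML_OUTPUT path):
+#   cd <html-output-dir>
+#   make && make install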
+ +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. + +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. 
+# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. 
If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. + +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. 
Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. 
+ +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. + +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. 
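+# As an illustration (hypothetical macro, not one used by this project):
+# "MY_EXPORT_API=" simply predefines MY_EXPORT_API as empty, while
+# "MY_EXPORT_API:=" additionally prevents the definition from being removed
+# by an #undef in the sources or from being recursively expanded.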
+ +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). + +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. 
If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. + +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. 
So in most cases it will be better to enable call graphs
+# for selected functions only using the \callgraph command.
+
+CALL_GRAPH = NO
+
+# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
+# doxygen will generate a caller dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable caller
+# graphs for selected functions only using the \callergraph command.
+
+CALLER_GRAPH = YES
+
+# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
+# will generate a graphical hierarchy of all classes instead of a textual one.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
+# then doxygen will show the dependencies a directory has on other directories
+# in a graphical way. The dependency relations are determined by the #include
+# relations between the files in the directories.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. Possible values are png, jpg, or gif.
+# If left blank png will be used.
+
+DOT_IMAGE_FORMAT = png
+
+# The tag DOT_PATH can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+
+DOT_PATH = %DOT_PATH%
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the
+# \dotfile command).
+
+DOTFILE_DIRS =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
+# nodes that will be shown in the graph. If the number of nodes in a graph
+# becomes larger than this value, doxygen will truncate the graph, which is
+# visualized by representing a node as a red box. Note that if the
+# number of direct children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note
+# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+
+DOT_GRAPH_MAX_NODES = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
+# graphs generated by dot. A depth value of 3 means that only nodes reachable
+# from the root by following a path via at most 3 edges will be shown. Nodes
+# that lie further from the root node will be omitted. Note that setting this
+# option to 1 or 2 may greatly reduce the computation time needed for large
+# code bases. Also note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+
+MAX_DOT_GRAPH_DEPTH = 1000
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not
+# seem to support this out of the box. Warning: Depending on the platform used,
+# enabling this option may lead to badly anti-aliased labels on the edges of
+# a graph (i.e. they become hard to read).
+
+DOT_TRANSPARENT = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10)
+# support this, this feature is disabled by default.
+ +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/branches/jsoncpp/cdunn-146/doc/footer.html b/branches/jsoncpp/cdunn-146/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/branches/jsoncpp/cdunn-146/doc/header.html b/branches/jsoncpp/cdunn-146/doc/header.html new file mode 100644 index 0000000..d56ea59 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/branches/jsoncpp/cdunn-146/doc/jsoncpp.dox b/branches/jsoncpp/cdunn-146/doc/jsoncpp.dox
new file mode 100644
index 0000000..1cde6ff
--- /dev/null
+++ b/branches/jsoncpp/cdunn-146/doc/jsoncpp.dox
@@ -0,0 +1,123 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+   ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space" : true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- attach C and C++ style comments to elements during parsing
+- rewrite JSON documents preserving the original comments
+
+Note: comments used to be supported in JSON but were removed for
+portability (C-like comments are not supported in Python). Since
+comments are useful in configuration/input files, this feature was
+preserved.
+
+\section _example Code example
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout << "Failed to parse configuration\n"
+              << reader.getFormatedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins', return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, generate the new configuration document:
+// Since Json::Value has implicit constructors for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _pdownload Download
+The sources can be downloaded from the
+SourceForge download page.
+
+The latest version of the source is available in the project's subversion repository:
+
+http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/
+
+To check out the source, see the following
+instructions.
+
+\section _news What's New?
+The description of the latest changes can be found in
+NEWS.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest NEWS.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+The json-cpp library and this documentation are in Public Domain.
+
+\author Baptiste Lepilleur
+*/
diff --git a/branches/jsoncpp/cdunn-146/doc/readme.txt b/branches/jsoncpp/cdunn-146/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/branches/jsoncpp/cdunn-146/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
diff --git a/branches/jsoncpp/cdunn-146/doc/roadmap.dox b/branches/jsoncpp/cdunn-146/doc/roadmap.dox
new file mode 100644
index 0000000..7f3aa1a
--- /dev/null
+++ b/branches/jsoncpp/cdunn-146/doc/roadmap.dox
@@ -0,0 +1,32 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library builds
+    - Enhance help
+  - Platform portability check: (Note: was OK on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as a numeric value for use in preprocessor tests
+  - Remove buggy experimental hash stuff
+  - Release on SourceForge download
+  \section ms_strict Add a strict mode to the reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get jsonchecker failing tests to pass in strict mode
+  \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value.
+  Some typical use-cases involve converting an application-specific structure to/from a JSON document.
+  - Event-based parser to allow deserializing a JSON document directly into a data structure instead of
+    using the intermediate Json::Value.
+  - "Stream"-based parser to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provide an event-based parser. Should allow pulling & skipping events for ease of use.
+    - Provide a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property name definition to avoid allocation
+  - A static property dictionary can be provided to the JSON reader
+  - Performance scenarios & benchmarking
+*/
diff --git a/branches/jsoncpp/cdunn-146/doxybuild.py b/branches/jsoncpp/cdunn-146/doxybuild.py
new file mode 100644
index 0000000..0a31aae
--- /dev/null
+++ b/branches/jsoncpp/cdunn-146/doxybuild.py
@@ -0,0 +1,168 @@
+"""Script to generate doxygen documentation.
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/branches/jsoncpp/cdunn-146/include/json/autolink.h b/branches/jsoncpp/cdunn-146/include/json/autolink.h new file mode 100644 index 0000000..37c9258 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/autolink.h @@ -0,0 +1,19 @@ +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/include/json/config.h b/branches/jsoncpp/cdunn-146/include/json/config.h new file mode 100644 index 0000000..5d334cb --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/config.h @@ -0,0 +1,43 @@ +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/include/json/features.h b/branches/jsoncpp/cdunn-146/include/json/features.h new file mode 100644 index 0000000..f1404f6 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/features.h @@ -0,0 +1,42 @@ +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/include/json/forwards.h b/branches/jsoncpp/cdunn-146/include/json/forwards.h new file mode 100644 index 0000000..815075e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/forwards.h @@ -0,0 +1,38 @@ +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +# include "config.h" + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef int Int; + typedef unsigned int UInt; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/include/json/json.h b/branches/jsoncpp/cdunn-146/include/json/json.h new file mode 100644 index 0000000..c71ed65 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/json.h @@ -0,0 +1,10 @@ +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/include/json/reader.h b/branches/jsoncpp/cdunn-146/include/json/reader.h new file mode 100644 index 0000000..ee1d6a2 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/reader.h @@ -0,0 +1,196 @@ +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define 
CPPTL_JSON_READER_H_INCLUDED + +# include "features.h" +# include "value.h" +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
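+    *
+    * Illustrative usage sketch (not part of the original header; the malformed
+    * JSON text below is an invented example):
+    * \code
+    * Json::Reader reader;
+    * Json::Value root;
+    * if ( !reader.parse( "{ \"key\" : }", root ) )   // deliberately malformed
+    *    std::cerr << reader.getFormatedErrorMessages();
+    * \endcode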
+ */ + std::string getFormatedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/include/json/value.h b/branches/jsoncpp/cdunn-146/include/json/value.h new file mode 100644 index 0000000..5d1bc81 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/value.h @@ -0,0 +1,1050 @@ +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +# include "forwards.h" +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. 
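+    *
+    * Small illustration (a sketch added for this documentation; it relies only on
+    * constructors and methods declared below):
+    * \code
+    * Json::Value i( 42 );       // type() == Json::intValue
+    * Json::Value s( "text" );   // type() == Json::stringValue
+    * Json::Value n;             // type() == Json::nullValue
+    * \endcode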
+ */
+   enum ValueType
+   {
+      nullValue = 0, ///< 'null' value
+      intValue,      ///< signed integer value
+      uintValue,     ///< unsigned integer value
+      realValue,     ///< double value
+      stringValue,   ///< UTF-8 string value
+      booleanValue,  ///< bool value
+      arrayValue,    ///< array value (ordered list)
+      objectValue    ///< object value (collection of name/value pairs).
+   };
+
+   enum CommentPlacement
+   {
+      commentBefore = 0,        ///< a comment placed on the line before a value
+      commentAfterOnSameLine,   ///< a comment just after a value on the same line
+      commentAfter,             ///< a comment on the line after a value (only makes sense for the root value)
+      numberOfCommentPlacement
+   };
+
+//# ifdef JSON_USE_CPPTL
+//   typedef CppTL::AnyEnumerator EnumMemberNames;
+//   typedef CppTL::AnyEnumerator EnumValues;
+//# endif
+
+   /** \brief Lightweight wrapper to tag a static string.
+    *
+    * The Value constructor and objectValue member assignment take advantage of the
+    * StaticString and avoid the cost of string duplication when storing the
+    * string or the member name.
+    *
+    * Example of usage:
+    * \code
+    * Json::Value aValue( StaticString("some text") );
+    * Json::Value object;
+    * static const StaticString code("code");
+    * object[code] = 1234;
+    * \endcode
+    */
+   class JSON_API StaticString
+   {
+   public:
+      explicit StaticString( const char *czstring )
+         : str_( czstring )
+      {
+      }
+
+      operator const char *() const
+      {
+         return str_;
+      }
+
+      const char *c_str() const
+      {
+         return str_;
+      }
+
+   private:
+      const char *str_;
+   };
+
+   /** \brief Represents a JSON value.
+    *
+    * This class is a discriminated union wrapper that can represent:
+    * - a signed integer [range: Value::minInt - Value::maxInt]
+    * - an unsigned integer (range: 0 - Value::maxUInt)
+    * - a double
+    * - a UTF-8 string
+    * - a boolean
+    * - 'null'
+    * - an ordered list of Value
+    * - a collection of name/value pairs (JavaScript object)
+    *
+    * The type of the held value is represented by a #ValueType and
+    * can be obtained using type().
+    *
+    * Values of an #objectValue or #arrayValue can be accessed using operator[]() methods.
+    * Non-const methods will automatically create a #nullValue element
+    * if it does not exist.
+    * The sequence of an #arrayValue will be automatically resized and initialized
+    * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
+    *
+    * The get() methods can be used to obtain a default value in case the required element
+    * does not exist.
+    *
+    * It is possible to iterate over the member names of an #objectValue using
+    * the getMemberNames() method.
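+    *
+    * Usage sketch (illustrative only; the member names "name" and "list" are
+    * invented, the methods are the ones declared below):
+    * \code
+    * Json::Value root( Json::objectValue );
+    * root["name"] = "example";                 // creates the member on first access
+    * root["list"].append( 1 );                 // null member becomes an arrayValue
+    * root["list"].append( 2 );
+    * std::string name = root.get( "name", "default" ).asString();
+    * Json::Value::Members members = root.getMemberNames();
+    * for ( unsigned int i = 0; i < members.size(); ++i )
+    *    std::cout << members[i] << std::endl;  // print each member name
+    * \endcode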
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; + typedef UInt ArrayIndex; + + static const Value null; + static const Int minInt; + static const Int maxInt; + static const UInt maxUInt; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( int index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + int index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + int index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. 
+ void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + UInt size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( UInt size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( UInt index ); + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( UInt index ) const; + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( UInt index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( UInt index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. 
+ * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... */ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 
1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + Int int_; + UInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( UInt index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + UInt index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. + Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). 
+ * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
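+      // Worked example of the look-up described above (illustrative comment, not in
+      // the original header): with itemsPerPage == 8, item index 13 maps to
+      // pages_[13 / 8][13 % 8], i.e. page 1, slot 5. Keeping itemsPerPage a power
+      // of two lets the divide and modulo compile down to a shift and a bit-mask.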
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/include/json/writer.h b/branches/jsoncpp/cdunn-146/include/json/writer.h new file mode 100644 index 0000000..5f4b83b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/include/json/writer.h @@ -0,0 +1,174 @@ +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +# include "value.h" +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. 
+ * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
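+    *
+    * Usage sketch (illustrative; the output stream, file name and indentation are invented):
+    * \code
+    * Json::Value root;
+    * root["enabled"] = true;
+    * std::ofstream out( "config.json" );
+    * Json::StyledStreamWriter writer( "   " );   // three-space indent per level
+    * writer.write( out, root );
+    * \endcode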
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/makefiles/vs71/jsoncpp.sln b/branches/jsoncpp/cdunn-146/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + 
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/branches/jsoncpp/cdunn-146/makefiles/vs71/jsontest.vcproj b/branches/jsoncpp/cdunn-146/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/branches/jsoncpp/cdunn-146/makefiles/vs71/lib_json.vcproj b/branches/jsoncpp/cdunn-146/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/branches/jsoncpp/cdunn-146/makefiles/vs71/test_lib_json.vcproj b/branches/jsoncpp/cdunn-146/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/branches/jsoncpp/cdunn-146/makerelease.py b/branches/jsoncpp/cdunn-146/makerelease.py new file mode 100644 index 0000000..9b59180 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/makerelease.py @@ -0,0 +1,371 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.5.0 test-0.6.0-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' 
+ svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/branches/jsoncpp/cdunn-146/scons-tools/globtool.py b/branches/jsoncpp/cdunn-146/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/branches/jsoncpp/cdunn-146/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/branches/jsoncpp/cdunn-146/scons-tools/srcdist.py b/branches/jsoncpp/cdunn-146/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/branches/jsoncpp/cdunn-146/scons-tools/substinfile.py b/branches/jsoncpp/cdunn-146/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
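+
+        A sketch of a typical invocation from a SConscript, reusing the %VERSION%
+        example above (the file names here are illustrative, not from this project):
+
+            env = Environment( tools = ['default', 'substinfile'], toolpath = ['scons-tools'] )
+            env.SubstInFile( 'version.h', 'version.h.in',
+                             SUBST_DICT = {'%VERSION%': '1.2345'} )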
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/branches/jsoncpp/cdunn-146/scons-tools/targz.py b/branches/jsoncpp/cdunn-146/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/branches/jsoncpp/cdunn-146/src/jsontestrunner/main.cpp b/branches/jsoncpp/cdunn-146/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..231ee0c --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/jsontestrunner/main.cpp @@ -0,0 +1,233 @@ +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormatedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + + return exitCode; +} + diff --git a/branches/jsoncpp/cdunn-146/src/jsontestrunner/sconscript b/branches/jsoncpp/cdunn-146/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_batchallocator.h b/branches/jsoncpp/cdunn-146/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..87ea5ed --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_batchallocator.h @@ -0,0 +1,125 @@ +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_internalarray.inl b/branches/jsoncpp/cdunn-146/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..9b985d2 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_internalarray.inl @@ -0,0 +1,448 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( 
Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). 
+ } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + "ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + 
value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_internalmap.inl b/branches/jsoncpp/cdunn-146/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..bade5d5 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_internalmap.inl @@ -0,0 +1,607 @@ +// included by json_value.cpp +// everything is within Json namespace + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
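The assignment operator above is the classic copy-and-swap idiom: build a copy of the right-hand side, then exchange raw members with a swap() that cannot throw, so the object is never left half-assigned and the old resources are released by the temporary's destructor. A minimal sketch of the same idiom on an invented Buffer class:

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <iostream>

class Buffer
{
public:
    Buffer( const char *text )
        : size_( std::strlen( text ) )
        , data_( new char[size_ + 1] )
    {
        std::memcpy( data_, text, size_ + 1 );
    }

    Buffer( const Buffer &other )
        : size_( other.size_ )
        , data_( new char[other.size_ + 1] )
    {
        std::memcpy( data_, other.data_, size_ + 1 );
    }

    ~Buffer() { delete[] data_; }

    void swap( Buffer &other )                 // never throws: just exchanges members
    {
        std::swap( size_, other.size_ );
        std::swap( data_, other.data_ );
    }

    Buffer &operator=( const Buffer &other )   // copying may throw, but *this is
    {                                          // only modified by the no-throw swap()
        Buffer temp( other );
        swap( temp );
        return *this;
    }                                          // temp's destructor frees the old data

    const char *c_str() const { return data_; }

private:
    std::size_t size_;
    char *data_;
};

int main()
{
    Buffer a( "old" ), b( "new contents" );
    a = b;
    std::cout << a.c_str() << "\n";
    return 0;
}

Self-assignment is also handled for free, since the copy is taken before anything in *this changes.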
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
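The hash used above is deliberately simple: each character of the key is scaled by 37 and accumulated, and the bucket is the hash reduced modulo the bucket count; collisions are then absorbed by the per-bucket chains of itemPerLink slots. A freestanding sketch of just the key-to-bucket step (hashKey and bucketFor are invented helper names):

#include <iostream>

typedef unsigned int HashKey;
typedef unsigned int BucketIndex;

// Same scheme as ValueInternalMap::hash() above: accumulate each character
// scaled by 37, then reduce modulo the bucket count.
static HashKey hashKey( const char *key )
{
    HashKey hash = 0;
    while ( *key )
        hash += static_cast<HashKey>( *key++ ) * 37;
    return hash;
}

static BucketIndex bucketFor( const char *key, BucketIndex bucketCount )
{
    return hashKey( key ) % bucketCount;
}

int main()
{
    const BucketIndex bucketCount = 8;
    const char *keys[] = { "name", "size", "name" };   // equal keys always land in the same bucket
    for ( int i = 0; i < 3; ++i )
        std::cout << keys[i] << " -> bucket " << bucketFor( keys[i], bucketCount ) << "\n";
    return 0;
}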
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_reader.cpp b/branches/jsoncpp/cdunn-146/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..da8e83b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_reader.cpp @@ -0,0 +1,850 @@ +#include +#include +#include "json_tool.h" +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. +#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
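For reference, this is roughly how a caller would drive the parser defined above, combining Features::strictMode() from the top of this file with the std::string overload of parse() and the error formatting defined later in this file; the header name and the sample document are illustrative only.

#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
    const std::string doc = "{ \"name\" : \"jsoncpp\", \"version\" : 1 }";

    Json::Reader reader( Json::Features::strictMode() );  // no comments, root must be array/object
    Json::Value root;
    if ( !reader.parse( doc, root, /*collectComments=*/ false ) )
    {
        std::cerr << reader.getFormatedErrorMessages();    // "Line N, Column M" per error
        return 1;
    }
    std::cout << root["name"].asString() << " " << root["version"].asInt() << "\n";
    return 0;
}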
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); 
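readToken() above recognizes the keywords true/false/null by consuming the first character and then calling match() with the remaining letters, which only advances the read cursor when the whole pattern is present. A standalone sketch of that technique (the free function below mirrors Reader::match() but is not the member itself):

#include <cstring>
#include <iostream>

// After consuming the first character of a keyword ('t', 'f' or 'n'),
// compare the remaining characters in place and only advance the cursor
// when the whole pattern is present.
static bool match( const char *&current, const char *end,
                   const char *pattern, int patternLength )
{
    if ( end - current < patternLength )
        return false;
    for ( int index = 0; index < patternLength; ++index )
        if ( current[index] != pattern[index] )
            return false;
    current += patternLength;
    return true;
}

int main()
{
    const char *doc = "true";
    const char *current = doc;
    const char *end = doc + std::strlen( doc );
    char c = *current++;                                   // consume 't'
    bool ok = ( c == 't' ) && match( current, end, "rue", 3 );
    std::cout << ( ok ? "tokenTrue" : "tokenError" ) << "\n";
    return 0;
}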
+ } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ == tokenArraySeparator && + token.type_ == tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::UInt threshold = (isNegative ? Value::UInt(-Value::minInt) + : Value::maxUInt) / 10; + Value::UInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + if ( value >= threshold ) + return decodeDouble( token ); + value = value * 10 + Value::UInt(c - '0'); + } + if ( isNegative ) + currentValue() = -Value::Int( value ); + else if ( value <= Value::UInt(Value::maxInt) ) + currentValue() = Value::Int( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': 
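decodeNumber() above avoids unsigned overflow by precomputing a threshold of maxUInt / 10 (or -minInt / 10 for negatives): once the accumulated value reaches that threshold, one more "value * 10 + digit" step might wrap, so it bails out to the double path instead. A small sketch of the same guard on its own (decodeAsUInt is an invented name, and like the original it is slightly conservative right at the limit):

#include <iostream>
#include <string>

// Stop accumulating into an unsigned int as soon as another multiply-by-ten
// step could overflow, and signal the caller to use the floating-point path.
static bool decodeAsUInt( const std::string &text, unsigned int &out )
{
    const unsigned int maxUInt = static_cast<unsigned int>( -1 );
    const unsigned int threshold = maxUInt / 10;   // last value that is safe to multiply
    unsigned int value = 0;
    for ( std::string::size_type i = 0; i < text.size(); ++i )
    {
        char c = text[i];
        if ( c < '0' || c > '9' )
            return false;                           // not an integer literal
        if ( value >= threshold )
            return false;                           // would overflow: fall back to double
        value = value * 10 + static_cast<unsigned int>( c - '0' );
    }
    out = value;
    return true;
}

int main()
{
    unsigned int v = 0;
    std::cout << decodeAsUInt( "4294967", v ) << " " << v << "\n";   // fits: prints "1 4294967"
    std::cout << decodeAsUInt( "42949672950", v ) << "\n";           // too big: prints "0"
    return 0;
}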
decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) 
+ 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +std::string +Reader::getFormatedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_tool.h b/branches/jsoncpp/cdunn-146/src/lib_json/json_tool.h new file mode 100644 index 0000000..ca4ea4f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_tool.h @@ -0,0 +1,78 @@ +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... + * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. Must have at least 10 chars free. 
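codePointToUTF8() above spreads a code point across one to four bytes, with six payload bits per continuation byte. Here is its three-byte branch applied to U+20AC (the euro sign) as a standalone check of the bit layout (encode3 is an invented name that copies only that branch):

#include <cstdio>
#include <string>

// Three-byte UTF-8: 1110xxxx 10xxxxxx 10xxxxxx, filled from the low bits up.
static std::string encode3( unsigned int cp )
{
    std::string result( 3, '\0' );
    result[2] = static_cast<char>( 0x80 | ( 0x3f & cp ) );            // low 6 bits
    result[1] = static_cast<char>( 0x80 | ( 0x3f & ( cp >> 6 ) ) );   // middle 6 bits
    result[0] = static_cast<char>( 0xE0 | ( 0x0f & ( cp >> 12 ) ) );  // high 4 bits
    return result;
}

int main()
{
    std::string utf8 = encode3( 0x20AC );
    for ( std::string::size_type i = 0; i < utf8.size(); ++i )
        std::printf( "%02X ", static_cast<unsigned char>( utf8[i] ) );   // prints E2 82 AC
    std::printf( "\n" );
    return 0;
}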
+ */ +static inline void +uintToString( unsigned int value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = (value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_value.cpp b/branches/jsoncpp/cdunn-146/src/lib_json/json_value.cpp new file mode 100644 index 0000000..1ccf70f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_value.cpp @@ -0,0 +1,1666 @@ +#include +#include +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t +#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); + +/// Unknown size marker +enum { unknown = (unsigned)-1 }; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
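uintToString() above, whose second parameter is a char *& cursor, fills a caller-supplied buffer backwards: the terminating NUL is written first, then digits from least to most significant, and the cursor is left pointing at the first digit. An illustrative caller, with the function repeated so the sketch is self-contained:

#include <cstdio>

// The caller points 'current' one past the end of a buffer with room for
// the digits plus the terminating NUL; the function fills it backwards.
static void uintToString( unsigned int value, char *&current )
{
    *--current = 0;                       // write the terminator first
    do
    {
        *--current = static_cast<char>( ( value % 10 ) + '0' );
        value /= 10;
    }
    while ( value != 0 );
}

int main()
{
    char buffer[32];
    char *current = buffer + sizeof(buffer);   // start one past the end
    uintToString( 4096, current );
    std::printf( "%s\n", current );            // prints 4096; current now points at '4'
    return 0;
}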
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +#endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( int index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +int +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + UInt(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , 
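The noDuplication path above is what Json::StaticString is for: a key or value whose storage outlives the Value (typically a string literal) can be stored by pointer instead of being copied. An illustrative caller, assuming the json/json.h umbrella header from this tree and a StaticString constructor taking a const char * as used above:

#include <json/json.h>
#include <iostream>

int main()
{
    Json::Value root;

    // Member name is stored without copying: operator[]( const StaticString & )
    // calls resolveReference( key, /*isStatic=*/ true ), so CZString keeps the
    // pointer with the noDuplication policy. The literal outlives root.
    static const Json::StaticString kName( "name" );
    root[kName] = "jsoncpp";

    std::cout << root["name"].asString() << "\n";
    return 0;
}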
comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // 
unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 
1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +Value::UInt +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return Int( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( UInt newSize ) +{ + 
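isConvertibleTo() above lets callers test a conversion before asInt()/asUInt() and friends enforce their range checks, and asBool() treats any non-zero number (or non-empty string/container) as true. A short illustrative caller (header name assumed as elsewhere in this tree):

#include <json/json.h>
#include <iostream>

int main()
{
    Json::Value big( 3000000000u );          // uintValue above maxInt

    std::cout << big.isConvertibleTo( Json::intValue ) << "\n";   // 0: out of signed range
    std::cout << big.isConvertibleTo( Json::realValue ) << "\n";  // 1
    std::cout << big.asDouble() << "\n";                          // 3e+09
    std::cout << big.asBool() << "\n";                            // 1: non-zero
    return 0;
}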
JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + UInt oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( UInt index = newSize; index < oldSize; ++index ) + value_.map_->erase( index ); + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( UInt index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +const Value & +Value::operator[]( UInt index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( UInt index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( UInt index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
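Taken together, the array accessors above give three distinct behaviours: operator[](UInt) grows the array on demand, get(index, default) never modifies it, and isValidIndex() checks bounds without touching any element. An illustrative caller (header name assumed):

#include <json/json.h>
#include <iostream>

int main()
{
    Json::Value arr;                    // starts as nullValue
    arr[0u] = 10;                       // becomes arrayValue
    arr[1u] = 20;
    arr[2u] = 30;                       // size is now 3

    std::cout << arr.size() << "\n";                  // 3
    std::cout << arr.isValidIndex( 5u ) << "\n";      // 0 (false)
    std::cout << arr.get( 5u, -1 ).asInt() << "\n";   // -1: default returned, array unchanged

    arr.resize( 1u );                                 // drops indices 1 and 2
    std::cout << arr[0u].asInt() << "\n";             // 10
    return 0;
}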
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
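The object accessors above follow the same pattern for string keys: get(key, default) for optional members, isMember() for presence tests, removeMember() which hands back the removed value, and getMemberNames() to enumerate keys. An illustrative caller (header name assumed):

#include <json/json.h>
#include <iostream>

int main()
{
    Json::Value config;
    config["host"] = "localhost";
    config["port"] = 8080;

    int port = config.get( "port", 80 ).asInt();          // 8080
    int timeout = config.get( "timeout", 30 ).asInt();    // 30: key absent, default used

    if ( config.isMember( "host" ) )
        std::cout << config["host"].asString() << ":" << port
                  << " timeout=" << timeout << "\n";

    Json::Value removed = config.removeMember( "host" );  // returns the old value
    std::cout << removed.asString() << " removed, "
              << config.isMember( "host" ) << "\n";        // 0 (false)

    Json::Value::Members names = config.getMemberNames(); // remaining keys
    for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
        std::cout << *it << "\n";                          // port
    return 0;
}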
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( Value::UInt index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + Value::UInt index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + Value::UInt(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
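
The object and array accessors implemented above (operator[], get(), isMember(), removeMember(), append()) are the public surface most callers touch. A minimal usage sketch follows, assuming <json/json.h> pulls in the Value declarations as laid out in this import's include/json directory; the member names are purely illustrative.

#include <json/json.h>
#include <iostream>

void accessorSketch()
{
    Json::Value root;                                  // starts as nullValue
    root["name"] = "jsoncpp";                          // resolveReference() turns root into an object
    root["tags"].append( "json" );                     // append(): null -> arrayValue, then operator[](UInt)
    root["tags"].append( "parser" );

    // get() returns the supplied default when a member is missing, without
    // inserting a new null member the way operator[] does.
    std::string owner = root.get( "owner", "unknown" ).asString();
    std::cout << owner << std::endl;

    if ( root.isMember( "name" ) )
        std::cout << root["name"].asString() << std::endl;

    Json::Value removed = root.removeMember( "name" ); // yields the old value, or null if absent
    std::cout << removed.asString() << std::endl;
}
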
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_valueiterator.inl b/branches/jsoncpp/cdunn-146/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..736e260 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_valueiterator.inl @@ -0,0 +1,292 @@ +// included by json_value.cpp +// everything is within Json namespace + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( 
const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/json_writer.cpp b/branches/jsoncpp/cdunn-146/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..3b926e6 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/json_writer.cpp @@ -0,0 +1,814 @@ +#include +#include "json_tool.h" +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
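
With ValueIteratorBase and the two concrete iterator classes above, a Value can also be walked directly instead of going through getMemberNames(). A short sketch, under the same <json/json.h> assumption as before; key() yields the member name (for objects) or the index (for arrays) as a Value.

#include <json/json.h>
#include <iostream>

void iterationSketch( const Json::Value &root )
{
    for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
    {
        // key() comes from ValueIteratorBase::key() above; *it dereferences
        // the current member through deref().
        std::cout << it.key().toStyledString() << " -> " << (*it).toStyledString();
    }
}
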
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + +std::string valueToString( Int value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( UInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( UInt value ) +{ + char buffer[32]; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asInt() ); + break; + case uintValue: + document_ += valueToString( value.asUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
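
Between FastWriter, StyledWriter and the StyledStreamWriter whose write() just ended, the library offers three serialization entry points. A brief usage sketch, again assuming <json/json.h>; the three-space indentation string passed to StyledStreamWriter is an arbitrary choice.

#include <json/json.h>
#include <iostream>

void writerSketch( const Json::Value &root )
{
    Json::FastWriter fast;                      // compact, single line, drops comments
    std::string compact = fast.write( root );

    Json::StyledWriter styled;                  // indented, emits stored comments
    std::string pretty = styled.write( root );

    Json::StyledStreamWriter streamWriter( "   " );
    streamWriter.write( std::cout, root );      // same styled layout, written straight to a stream

    std::cout << compact << pretty;
}
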
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/branches/jsoncpp/cdunn-146/src/lib_json/sconscript b/branches/jsoncpp/cdunn-146/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.cpp b/branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..a07d0fe --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.cpp @@ -0,0 +1,603 @@ +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.h b/branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..8f0bd31 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/test_lib_json/jsontest.h @@ -0,0 +1,254 @@ +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). + PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. 
+ TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. + int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. 
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( condition ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. +#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/branches/jsoncpp/cdunn-146/src/test_lib_json/main.cpp b/branches/jsoncpp/cdunn-146/src/test_lib_json/main.cpp new file mode 100644 index 0000000..b80776d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/test_lib_json/main.cpp @@ -0,0 +1,244 @@ +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. 
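
The macros above are the entire authoring interface of this small framework. Below is a sketch of a hypothetical extra fixture (StringTest is not part of this import, and the sketch assumes the includes already at the top of this file), using only JSONTEST_ASSERT_EQUAL, the form the ValueTest fixtures further down also lean on. Note that JSONTEST_ASSERT as defined above tests "condition" rather than its "expr" parameter, so it would not compile if invoked the way its own doc comment suggests.

struct StringTest : JsonTest::TestCase            // hypothetical fixture type
{
   Json::Value abc_;
   StringTest() : abc_( "abc" ) {}
};

JSONTEST_FIXTURE( StringTest, asString )          // expands to class TestStringTestasString
{
   JSONTEST_ASSERT_EQUAL( std::string( "abc" ), abc_.asString() );
   JSONTEST_ASSERT_EQUAL( true, abc_.isString() );
}

// Registered from main() alongside the ValueTest fixtures:
//    JSONTEST_REGISTER_FIXTURE( runner, StringTest, asString );
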
+ + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. + IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, 
checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + return runner.runCommandLine( argc, argv ); +} diff --git a/branches/jsoncpp/cdunn-146/src/test_lib_json/sconscript b/branches/jsoncpp/cdunn-146/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/branches/jsoncpp/cdunn-146/test/cleantests.py b/branches/jsoncpp/cdunn-146/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_01.json b/branches/jsoncpp/cdunn-146/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_02.expected b/branches/jsoncpp/cdunn-146/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_02.json b/branches/jsoncpp/cdunn-146/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_03.expected b/branches/jsoncpp/cdunn-146/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_03.json b/branches/jsoncpp/cdunn-146/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_04.expected b/branches/jsoncpp/cdunn-146/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_04.json b/branches/jsoncpp/cdunn-146/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_05.expected b/branches/jsoncpp/cdunn-146/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 
+.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_05.json b/branches/jsoncpp/cdunn-146/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_06.expected b/branches/jsoncpp/cdunn-146/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_array_06.json b/branches/jsoncpp/cdunn-146/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_01.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_02.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_02.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_03.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ 
+.=1.2345678 + + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_03.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_04.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_04.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_05.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_05.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_06.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_06.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_07.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_07.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_08.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_08.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_09.expected b/branches/jsoncpp/cdunn-146/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_basic_09.json b/branches/jsoncpp/cdunn-146/test/data/test_basic_09.json new file mode 100644 index 
0000000..fc95f0f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_comment_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_comment_01.json b/branches/jsoncpp/cdunn-146/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/branches/jsoncpp/cdunn-146/test/data/test_complex_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_complex_01.json b/branches/jsoncpp/cdunn-146/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_01.json b/branches/jsoncpp/cdunn-146/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_02.expected b/branches/jsoncpp/cdunn-146/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_02.json b/branches/jsoncpp/cdunn-146/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_03.expected b/branches/jsoncpp/cdunn-146/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_03.expected @@ -0,0 +1 
@@ +.=4294967295 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_03.json b/branches/jsoncpp/cdunn-146/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_04.expected b/branches/jsoncpp/cdunn-146/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_04.json b/branches/jsoncpp/cdunn-146/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_05.expected b/branches/jsoncpp/cdunn-146/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_integer_05.json b/branches/jsoncpp/cdunn-146/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_large_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 
+.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 
+.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 
+.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 
+.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 
+.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 
+.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 
+.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 
+.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 
+.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 
+.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_large_01.json b/branches/jsoncpp/cdunn-146/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_large_01.json @@ -0,0 +1,2 @@ +[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,
836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579
,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_01.json b/branches/jsoncpp/cdunn-146/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_02.expected b/branches/jsoncpp/cdunn-146/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_02.expected @@ -0,0 +1,2 
@@ +.={} +.count=1234 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_02.json b/branches/jsoncpp/cdunn-146/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_03.expected b/branches/jsoncpp/cdunn-146/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_03.json b/branches/jsoncpp/cdunn-146/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_04.expected b/branches/jsoncpp/cdunn-146/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_object_04.json b/branches/jsoncpp/cdunn-146/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.json b/branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_01.json b/branches/jsoncpp/cdunn-146/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_02.expected b/branches/jsoncpp/cdunn-146/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_02.json b/branches/jsoncpp/cdunn-146/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_03.expected b/branches/jsoncpp/cdunn-146/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_03.json b/branches/jsoncpp/cdunn-146/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_04.expected b/branches/jsoncpp/cdunn-146/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_04.json b/branches/jsoncpp/cdunn-146/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_05.expected b/branches/jsoncpp/cdunn-146/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_05.json b/branches/jsoncpp/cdunn-146/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_06.expected b/branches/jsoncpp/cdunn-146/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_06.json b/branches/jsoncpp/cdunn-146/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_06.json 
@@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_07.expected b/branches/jsoncpp/cdunn-146/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_real_07.json b/branches/jsoncpp/cdunn-146/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_01.json b/branches/jsoncpp/cdunn-146/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_02.expected b/branches/jsoncpp/cdunn-146/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|
}~" \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_02.json b/branches/jsoncpp/cdunn-146/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.expected b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.json b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.expected b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.json b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git 
a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.expected b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.json b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.expected b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.json b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.expected b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.json b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/generate_expected.py b/branches/jsoncpp/cdunn-146/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail1.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail10.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail11.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail12.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail13.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail14.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail15.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail16.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail17.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail18.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail19.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail2.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" 
\ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail20.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail21.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail22.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail23.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail24.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail25.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail26.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail27.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail28.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail29.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail3.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git 
a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail30.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail31.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail32.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail33.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail4.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail5.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail6.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail7.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail8.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/fail9.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/pass1.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 
1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/pass2.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/pass3.json b/branches/jsoncpp/cdunn-146/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/branches/jsoncpp/cdunn-146/test/jsonchecker/readme.txt b/branches/jsoncpp/cdunn-146/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/branches/jsoncpp/cdunn-146/test/pyjsontestrunner.py b/branches/jsoncpp/cdunn-146/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/branches/jsoncpp/cdunn-146/test/runjsontests.py b/branches/jsoncpp/cdunn-146/test/runjsontests.py new file mode 100644 index 0000000..800337d --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + is_json_checker_test = input_path in test_jsonchecker + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + 
expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/branches/jsoncpp/cdunn-146/test/rununittests.py b/branches/jsoncpp/cdunn-146/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/branches/jsoncpp/cdunn-146/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' 
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/branches/jsoncpp/cdunn-146/version b/branches/jsoncpp/cdunn-146/version new file mode 100644 index 0000000..7defe1e --- /dev/null +++ b/branches/jsoncpp/cdunn-146/version @@ -0,0 +1 @@ +0.6.0-dev \ No newline at end of file From 9fdd34b5d3b260ad75d20ed183a0bcf6f9713ecb Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 14 Apr 2010 13:17:26 +0000 Subject: [PATCH 148/268] - added need for 64 bits integer to roadmap git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@148 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/roadmap.dox | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox index 7f3aa1a..d38ebe8 100644 --- a/trunk/jsoncpp/doc/roadmap.dox +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -1,4 +1,14 @@ /*! \page roadmap JsonCpp roadmap + \section ms_64bits Adds support for 64-bit integers + There have been many requests to add support for 64-bit integers. Use cases for this are: + - time is nowadays commonly represented with a 64-bit integer + - 64-bit integers are frequently used as primary key ids in many systems + + The plan to add support is: + - must be optional, a configuration option, since not all platforms provide 64-bit integer types. + - move definition of Int and UInt from forwards.h to config.h, with the required platform magic. + - C++ defines no standard 64-bit integer type. Rely on the MSVC extension and the long long type that + is widely supported. \section ms_release Makes JsonCpp ready for release - Build system clean-up: - Fix build on Windows (shared-library build is broken) @@ -11,7 +21,6 @@ - aix/vacpp - Add JsonCpp version to header as numeric for use in preprocessor test - Remove buggy experimental hash stuff - - Release on sourceforge download \section ms_strict Adds a strict mode to reader/parser Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - Enforce only object or array as root element From fa88ce7eb365c628bada0cd8eefa9d19bc875696 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 19 Apr 2010 07:37:41 +0000 Subject: [PATCH 149/268] - Moved definition of Json::Int and Json::UInt to config.h, with compiler detection logic to define them as 64-bit integers if JSON_NO_INT64 is not defined. - Added Json::ArrayIndex as an unsigned int to forwards.h - Modified Json::Value to consistently use Json::ArrayIndex. - Added int/unsigned int constructor overloads to Json::Value to avoid ambiguous constructor calls. - Modified jsontestrunner/main.cpp to use Json::valueToString for Value::asInt() conversion to string. - Modified Json::Reader to only overflow to double when the number is too large (previous code relied on the fact that an int fitted in a double without precision loss). - Generalized uintToString() helpers and buffer size to automatically adapt to the precision of Json::UInt. - Added specific conversion logic for UInt to double conversion on Microsoft Visual Studio 6, which only supports __int64 to double conversion (unsigned __int64 conversion is not supported). - Added tests for 64-bit parsing/writing. Note: these will fail when compiled with JSON_NO_INT64 (more work is required to adapt). 
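The overflow rule described in the bullet above reduces to a threshold test on the value accumulated so far. The following is a minimal illustrative sketch of that test, not the library's code: the function name decodeAsUInt64 and the use of std::uint64_t are stand-ins for Json::Value::UInt, and the real logic lives in Reader::decodeNumber() in the json_reader.cpp hunk further below.

    #include <cstdint>
    #include <limits>
    #include <string>

    // Sketch: accumulate decimal digits into a 64-bit unsigned integer and report
    // failure as soon as one more digit would overflow, so the caller can fall
    // back to decoding the token as a double. Assumes digits holds only '0'..'9'.
    bool decodeAsUInt64( const std::string &digits, std::uint64_t &result )
    {
       const std::uint64_t maxValue  = std::numeric_limits<std::uint64_t>::max();
       const std::uint64_t threshold = maxValue / 10;  // below this, one more digit always fits
       const std::uint64_t lastDigit = maxValue % 10;  // largest digit allowed exactly at the threshold
       std::uint64_t value = 0;
       for ( std::string::size_type i = 0; i < digits.size(); ++i )
       {
          const std::uint64_t digit = std::uint64_t( digits[i] - '0' );
          if ( value > threshold || ( value == threshold && digit > lastDigit ) )
             return false;  // too large for an integer: decode as double instead
          value = value * 10 + digit;
       }
       result = value;
       return true;
    }

Computing the threshold and the largest admissible final digit up front means the loop never performs a multiplication that could wrap around.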
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@149 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 15 ++++- trunk/jsoncpp/include/json/config.h | 28 ++++++++ trunk/jsoncpp/include/json/forwards.h | 3 +- trunk/jsoncpp/include/json/value.h | 28 ++++---- trunk/jsoncpp/src/jsontestrunner/main.cpp | 23 ++++++- trunk/jsoncpp/src/lib_json/json_reader.cpp | 23 +++++-- trunk/jsoncpp/src/lib_json/json_tool.h | 16 ++++- trunk/jsoncpp/src/lib_json/json_value.cpp | 64 ++++++++++++++----- trunk/jsoncpp/src/lib_json/json_writer.cpp | 4 +- .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + 15 files changed, 170 insertions(+), 43 deletions(-) create mode 100644 trunk/jsoncpp/test/data/test_integer_06_64bits.expected create mode 100644 trunk/jsoncpp/test/data/test_integer_06_64bits.json create mode 100644 trunk/jsoncpp/test/data/test_integer_07_64bits.expected create mode 100644 trunk/jsoncpp/test/data/test_integer_07_64bits.json create mode 100644 trunk/jsoncpp/test/data/test_integer_08_64bits.expected create mode 100644 trunk/jsoncpp/test/data/test_integer_08_64bits.json diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index b5e8cf6..50116aa 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -18,4 +18,17 @@ initialization/destruction order issues (bug #2934500). The DefaultValueAllocator has been inlined in code. - + - Added support for 64-bit integers. Json::Int and Json::UInt are + now 64-bit integers on systems that support them (more precisely, + they are the size of long long, so if that is 128 bits it will + also work). + + Warning: Json::Value::asInt() and Json::Value::asUInt() now return + long long. This change breaks code that was passing the return value + to a *printf() function. + + Note: you can switch back to the 32-bit-only behavior by defining the + macro JSON_NO_INT64 (see include/json/config.h). + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 5d334cb..a0fed8a 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -40,4 +40,32 @@ # define JSON_API # endif +// If JSON_NO_INT64 is defined, then Json only supports the C++ "int" type for integer +// storage. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only supports conversion from __int64 to double +// (no conversion from unsigned __int64). 
+#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + + +namespace Json { +# if defined(JSON_NO_INT64) + typedef int Int; + typedef unsigned int UInt; +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int; + typedef unsigned __int64 UInt; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int; + typedef unsigned long long int UInt; +# endif // if defined(_MSC_VER) +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + #endif // JSON_CONFIG_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index 815075e..3a10a3b 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -16,8 +16,7 @@ namespace Json { class Features; // value.h - typedef int Int; - typedef unsigned int UInt; + typedef unsigned int ArrayIndex; class StaticString; class Path; class PathArgument; diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 5d1bc81..f7b9c34 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -121,7 +121,7 @@ namespace Json { typedef ValueConstIterator const_iterator; typedef Json::UInt UInt; typedef Json::Int Int; - typedef UInt ArrayIndex; + typedef Json::ArrayIndex ArrayIndex; static const Value null; static const Int minInt; @@ -140,20 +140,20 @@ namespace Json { duplicate, duplicateOnCopy }; - CZString( int index ); + CZString( ArrayIndex index ); CZString( const char *cstr, DuplicationPolicy allocate ); CZString( const CZString &other ); ~CZString(); CZString &operator =( const CZString &other ); bool operator<( const CZString &other ) const; bool operator==( const CZString &other ) const; - int index() const; + ArrayIndex index() const; const char *c_str() const; bool isStaticString() const; private: void swap( CZString &other ); const char *cstr_; - int index_; + ArrayIndex index_; }; public: @@ -182,6 +182,10 @@ namespace Json { \endcode */ Value( ValueType type = nullValue ); +#if !defined(JSON_NO_INT64) + Value( int value ); + Value( ArrayIndex value ); +#endif // if !defined(JSON_NO_INT64) Value( Int value ); Value( UInt value ); Value( double value ); @@ -248,7 +252,7 @@ namespace Json { bool isConvertibleTo( ValueType other ) const; /// Number of values in array or object - UInt size() const; + ArrayIndex size() const; /// \brief Return true if empty array, empty object, or null; /// otherwise, false. @@ -267,24 +271,24 @@ namespace Json { /// May only be called on nullValue or arrayValue. /// \pre type() is arrayValue or nullValue /// \post type() is arrayValue - void resize( UInt size ); + void resize( ArrayIndex size ); /// Access an array element (zero based index ). /// If the array contains less than index element, then null value are inserted /// in the array so that its size is index+1. /// (You may need to say 'value[0u]' to get your compiler to distinguish /// this from the operator[] which takes a string.) - Value &operator[]( UInt index ); + Value &operator[]( ArrayIndex index ); /// Access an array element (zero based index ) /// (You may need to say 'value[0u]' to get your compiler to distinguish /// this from the operator[] which takes a string.) 
- const Value &operator[]( UInt index ) const; + const Value &operator[]( ArrayIndex index ) const; /// If the array contains at least index+1 elements, returns the element value, /// otherwise returns defaultValue. - Value get( UInt index, + Value get( ArrayIndex index, const Value &defaultValue ) const; /// Return true if index < size(). - bool isValidIndex( UInt index ) const; + bool isValidIndex( ArrayIndex index ) const; /// \brief Append value to array at the end. /// /// Equivalent to jsonvalue[jsonvalue.size()] = value; @@ -454,7 +458,7 @@ namespace Json { friend class Path; PathArgument(); - PathArgument( UInt index ); + PathArgument( ArrayIndex index ); PathArgument( const char *key ); PathArgument( const std::string &key ); @@ -466,7 +470,7 @@ namespace Json { kindKey }; std::string key_; - UInt index_; + ArrayIndex index_; Kind kind_; }; diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 231ee0c..3e6cd5d 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -35,10 +35,10 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) fprintf( fout, "%s=null\n", path.c_str() ); break; case Json::intValue: - fprintf( fout, "%s=%d\n", path.c_str(), value.asInt() ); + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asInt() ).c_str() ); break; case Json::uintValue: - fprintf( fout, "%s=%u\n", path.c_str(), value.asUInt() ); + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asUInt() ).c_str() ); break; case Json::realValue: fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); @@ -148,6 +148,19 @@ removeSuffix( const std::string &path, return path.substr( 0, path.length() - extension.length() ); } + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + static int printUsage( const char *argv[] ) { @@ -175,6 +188,12 @@ parseCommandLine( int argc, const char *argv[], ++index; } + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + if ( index == argc || index + 1 < argc ) { return printUsage( argv ); diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index da8e83b..2bd38f0 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -555,21 +555,36 @@ Reader::decodeNumber( Token &token ) } if ( isDouble ) return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. Location current = token.start_; bool isNegative = *current == '-'; if ( isNegative ) ++current; - Value::UInt threshold = (isNegative ? Value::UInt(-Value::minInt) - : Value::maxUInt) / 10; + Value::UInt maxIntegerValue = isNegative ? 
Value::UInt(-Value::minInt) + : Value::maxUInt; + Value::UInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = maxIntegerValue % 10; + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); Value::UInt value = 0; while ( current < token.end_ ) { Char c = *current++; if ( c < '0' || c > '9' ) return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); if ( value >= threshold ) - return decodeDouble( token ); - value = value * 10 + Value::UInt(c - '0'); + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. + if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; } if ( isNegative ) currentValue() = -Value::Int( value ); diff --git a/trunk/jsoncpp/src/lib_json/json_tool.h b/trunk/jsoncpp/src/lib_json/json_tool.h index ca4ea4f..5ffc2de 100644 --- a/trunk/jsoncpp/src/lib_json/json_tool.h +++ b/trunk/jsoncpp/src/lib_json/json_tool.h @@ -56,18 +56,28 @@ isControlCharacter(char ch) } +enum { + /// Constant that specify the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(UInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + /** Converts an unsigned integer to string. * @param value Unsigned interger to convert to string - * @param current Input/Output string buffer. Must have at least 10 chars free. + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. */ static inline void -uintToString( unsigned int value, +uintToString( UInt value, char *¤t ) { *--current = 0; do { - *--current = (value % 10) + '0'; + *--current = char(value % 10) + '0'; value /= 10; } while ( value != 0 ); diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 1ccf70f..60362ad 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -119,7 +119,7 @@ Value::CommentInfo::setComment( const char *text ) // Notes: index_ indicates if the string was allocated when // a string is stored. 
-Value::CZString::CZString( int index ) +Value::CZString::CZString( ArrayIndex index ) : cstr_( 0 ) , index_( index ) { @@ -179,7 +179,7 @@ Value::CZString::operator==( const CZString &other ) const } -int +ArrayIndex Value::CZString::index() const { return index_; @@ -257,6 +257,30 @@ Value::Value( ValueType type ) } +#if !defined(JSON_NO_INT64) +Value::Value( ArrayIndex value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if !defined(JSON_NO_INT64) + + Value::Value( Int value ) : type_( intValue ) , comments_( 0 ) @@ -310,7 +334,7 @@ Value::Value( const char *beginValue, #endif { value_.string_ = duplicateStringValue( beginValue, - UInt(endValue - beginValue) ); + (unsigned int)(endValue - beginValue) ); } @@ -722,9 +746,13 @@ Value::asDouble() const case nullValue: return 0.0; case intValue: - return value_.int_; + return static_cast( value_.int_ ); case uintValue: - return value_.uint_; +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) case realValue: return value_.real_; case booleanValue: @@ -817,7 +845,7 @@ Value::isConvertibleTo( ValueType other ) const /// Number of values in array or object -Value::UInt +ArrayIndex Value::size() const { switch ( type_ ) @@ -839,7 +867,7 @@ Value::size() const } return 0; case objectValue: - return Int( value_.map_->size() ); + return ArrayIndex( value_.map_->size() ); #else case arrayValue: return Int( value_.array_->size() ); @@ -896,21 +924,23 @@ Value::clear() } void -Value::resize( UInt newSize ) +Value::resize( ArrayIndex newSize ) { JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); if ( type_ == nullValue ) *this = Value( arrayValue ); #ifndef JSON_VALUE_USE_INTERNAL_MAP - UInt oldSize = size(); + ArrayIndex oldSize = size(); if ( newSize == 0 ) clear(); else if ( newSize > oldSize ) (*this)[ newSize - 1 ]; else { - for ( UInt index = newSize; index < oldSize; ++index ) + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { value_.map_->erase( index ); + } assert( size() == newSize ); } #else @@ -920,7 +950,7 @@ Value::resize( UInt newSize ) Value & -Value::operator[]( UInt index ) +Value::operator[]( ArrayIndex index ) { JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); if ( type_ == nullValue ) @@ -941,7 +971,7 @@ Value::operator[]( UInt index ) const Value & -Value::operator[]( UInt index ) const +Value::operator[]( ArrayIndex index ) const { JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); if ( type_ == nullValue ) @@ -991,7 +1021,7 @@ Value::resolveReference( const char *key, Value -Value::get( UInt index, +Value::get( ArrayIndex index, const Value &defaultValue ) const { const Value *value = &((*this)[index]); @@ -1000,7 +1030,7 @@ Value::get( UInt index, bool -Value::isValidIndex( UInt index ) const +Value::isValidIndex( ArrayIndex index ) const { return index < size(); } @@ -1463,7 +1493,7 @@ PathArgument::PathArgument() } -PathArgument::PathArgument( Value::UInt index ) +PathArgument::PathArgument( ArrayIndex index ) : index_( index ) , kind_( kindIndex ) { @@ -1519,9 +1549,9 @@ Path::makePath( const std::string 
&path, addPathInArg( path, in, itInArg, PathArgument::kindIndex ); else { - Value::UInt index = 0; + ArrayIndex index = 0; for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + Value::UInt(*current - '0'); + index = index * 10 + ArrayIndex(*current - '0'); args_.push_back( index ); } if ( current == end || *current++ != ']' ) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 3b926e6..92782db 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -26,7 +26,7 @@ static bool containsControlCharacter( const char* str ) std::string valueToString( Int value ) { - char buffer[32]; + UIntToStringBuffer buffer; char *current = buffer + sizeof(buffer); bool isNegative = value < 0; if ( isNegative ) @@ -41,7 +41,7 @@ std::string valueToString( Int value ) std::string valueToString( UInt value ) { - char buffer[32]; + UIntToStringBuffer buffer; char *current = buffer + sizeof(buffer); uintToString( value, current ); assert( current >= buffer ); diff --git a/trunk/jsoncpp/test/data/test_integer_06_64bits.expected b/trunk/jsoncpp/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/trunk/jsoncpp/test/data/test_integer_06_64bits.json b/trunk/jsoncpp/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/trunk/jsoncpp/test/data/test_integer_07_64bits.expected b/trunk/jsoncpp/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/trunk/jsoncpp/test/data/test_integer_07_64bits.json b/trunk/jsoncpp/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/trunk/jsoncpp/test/data/test_integer_08_64bits.expected b/trunk/jsoncpp/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/trunk/jsoncpp/test/data/test_integer_08_64bits.json b/trunk/jsoncpp/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + From 96a6cdf7f1ade5269ee7e8b911b48027df82e8ea Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 20 Apr 2010 21:35:19 +0000 Subject: [PATCH 150/268] JsonCpp is now licensed under MIT license, or public domain if desired and recognized in your jurisdiction. 
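The test_integer_*_64bits data added above pins down the 64-bit boundary values (2^63, -2^63 and 2^64-1). A minimal sketch of the behaviour those files imply, assuming the unsigned accessors introduced by the 64-bit rework later in this series (the helper name and the wrapping array are illustrative only):

   #include <json/json.h>
   #include <cassert>

   // Sketch only: 18446744073709551615 (2^64-1) cannot be represented as a
   // signed integer, so the reader is expected to store it as uintValue and
   // hand it back through the unsigned accessors.
   void checkUInt64RoundTrip()
   {
      Json::Reader reader;
      Json::Value root;
      if ( reader.parse( "[18446744073709551615]", root ) )
      {
         assert( root[0u].type() == Json::uintValue );
         assert( root[0u].asLargestUInt() == 18446744073709551615ULL );
      }
   }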
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@150 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/LICENSE | 58 +++++- trunk/jsoncpp/NEWS.txt | 5 + trunk/jsoncpp/README.txt | 6 + trunk/jsoncpp/devtools/licenseupdater.py | 93 +++++++++ trunk/jsoncpp/doc/jsoncpp.dox | 5 +- trunk/jsoncpp/doxybuild.py | 1 + trunk/jsoncpp/include/json/autolink.h | 5 + trunk/jsoncpp/include/json/config.h | 5 + trunk/jsoncpp/include/json/features.h | 89 +++++---- trunk/jsoncpp/include/json/forwards.h | 5 + trunk/jsoncpp/include/json/json.h | 5 + trunk/jsoncpp/include/json/reader.h | 5 + trunk/jsoncpp/include/json/value.h | 5 + trunk/jsoncpp/include/json/writer.h | 5 + trunk/jsoncpp/src/jsontestrunner/main.cpp | 5 + .../src/lib_json/json_batchallocator.h | 5 + .../src/lib_json/json_internalarray.inl | 5 + .../jsoncpp/src/lib_json/json_internalmap.inl | 5 + trunk/jsoncpp/src/lib_json/json_reader.cpp | 5 + trunk/jsoncpp/src/lib_json/json_tool.h | 181 +++++++++--------- trunk/jsoncpp/src/lib_json/json_value.cpp | 5 + .../src/lib_json/json_valueiterator.inl | 5 + trunk/jsoncpp/src/lib_json/json_writer.cpp | 5 + trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 5 + trunk/jsoncpp/src/test_lib_json/jsontest.h | 5 + trunk/jsoncpp/src/test_lib_json/main.cpp | 5 + 26 files changed, 396 insertions(+), 132 deletions(-) create mode 100644 trunk/jsoncpp/devtools/licenseupdater.py diff --git a/trunk/jsoncpp/LICENSE b/trunk/jsoncpp/LICENSE index d20fb29..7ba548c 100644 --- a/trunk/jsoncpp/LICENSE +++ b/trunk/jsoncpp/LICENSE @@ -1 +1,57 @@ -The json-cpp library and this documentation are in Public Domain. +This is the LICENSE file for JsonCpp, a C++ library implementing a +JSON format reader and writer. + +Author: Baptiste Lepilleur + +The license for this library's code is as follows: + + - If the code is used in a jurisdiction where Public Domain + property is regonized, then this code may be considered to be + in the Public Domain. Its author expressly disclaims copyright + in jurisdictions where such a disclaimer is allowed. + + - If the code is used in a jurisdiction which does not recognize + Public Domain, the code must be used in terms with the MIT license, + as described clearly and concisely at: + + http://en.wikipedia.org/wiki/MIT_License + + and reproduced in full below. + + - If the code is used in a jurisdiction which recognizes Public + Domain, the user may instead use the code under the terms of the + MIT license. + + The MIT licensing terms follow: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 50116aa..63b43b1 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -32,3 +32,8 @@ - The type Json::ArrayIndex is used for indexes of a JSON value array. It is an unsigned int (typically 32 bits). + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under + MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 424e42d..5c3b334 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -120,3 +120,9 @@ Below is a short description of the content of each file: jsontest.exe from reading test_complex_01.rewrite. test_complex_01.process-output: jsontest.exe output, typically useful to understand parsing error. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/trunk/jsoncpp/devtools/licenseupdater.py b/trunk/jsoncpp/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/trunk/jsoncpp/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. 
+ """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] +Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 1cde6ff..05f23b5 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -117,7 +117,10 @@ Permanent link to the latest revision of the file in subversion: - UTF-8 and Unicode FAQ. \section _license License -The json-cpp library and this documentation are in Public Domain. +See file LICENSE in the top-directory of the project. + +Basically JsonCpp is licensed under MIT license, or public domain if desired +and recognized in your jurisdiction. 
\author Baptiste Lepilleur */ diff --git a/trunk/jsoncpp/doxybuild.py b/trunk/jsoncpp/doxybuild.py index 0a31aae..03ad68d 100644 --- a/trunk/jsoncpp/doxybuild.py +++ b/trunk/jsoncpp/doxybuild.py @@ -127,6 +127,7 @@ def yesno( bool ): tarball_sources = [ output_dir, 'README.txt', + 'LICENSE', 'NEWS.txt', 'version' ] diff --git a/trunk/jsoncpp/include/json/autolink.h b/trunk/jsoncpp/include/json/autolink.h index 37c9258..02328d1 100644 --- a/trunk/jsoncpp/include/json/autolink.h +++ b/trunk/jsoncpp/include/json/autolink.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef JSON_AUTOLINK_H_INCLUDED # define JSON_AUTOLINK_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index a0fed8a..3fe08f2 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef JSON_CONFIG_H_INCLUDED # define JSON_CONFIG_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/features.h b/trunk/jsoncpp/include/json/features.h index f1404f6..fd8e350 100644 --- a/trunk/jsoncpp/include/json/features.h +++ b/trunk/jsoncpp/include/json/features.h @@ -1,42 +1,47 @@ -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -# include "forwards.h" - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. - * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +# include "forwards.h" + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. 
+ * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index 3a10a3b..dd32fa0 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef JSON_FORWARDS_H_INCLUDED # define JSON_FORWARDS_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/json.h b/trunk/jsoncpp/include/json/json.h index c71ed65..da5fc96 100644 --- a/trunk/jsoncpp/include/json/json.h +++ b/trunk/jsoncpp/include/json/json.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef JSON_JSON_H_INCLUDED # define JSON_JSON_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index ee1d6a2..62232ea 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef CPPTL_JSON_READER_H_INCLUDED # define CPPTL_JSON_READER_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index f7b9c34..4de2c26 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef CPPTL_JSON_H_INCLUDED # define CPPTL_JSON_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 5f4b83b..4d74f93 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef JSON_WRITER_H_INCLUDED # define JSON_WRITER_H_INCLUDED diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 3e6cd5d..be3f44c 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #include #include // sort #include diff --git a/trunk/jsoncpp/src/lib_json/json_batchallocator.h b/trunk/jsoncpp/src/lib_json/json_batchallocator.h index 87ea5ed..173e2ed 100644 --- a/trunk/jsoncpp/src/lib_json/json_batchallocator.h +++ b/trunk/jsoncpp/src/lib_json/json_batchallocator.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED # define JSONCPP_BATCHALLOCATOR_H_INCLUDED diff --git a/trunk/jsoncpp/src/lib_json/json_internalarray.inl b/trunk/jsoncpp/src/lib_json/json_internalarray.inl index 9b985d2..66d838e 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalarray.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalarray.inl @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + // included by json_value.cpp // everything is within Json namespace diff --git a/trunk/jsoncpp/src/lib_json/json_internalmap.inl b/trunk/jsoncpp/src/lib_json/json_internalmap.inl index bade5d5..d0dd62a 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalmap.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalmap.inl @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + // included by json_value.cpp // everything is within Json namespace diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 2bd38f0..d2c255c 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #include #include #include "json_tool.h" diff --git a/trunk/jsoncpp/src/lib_json/json_tool.h b/trunk/jsoncpp/src/lib_json/json_tool.h index 5ffc2de..c20639d 100644 --- a/trunk/jsoncpp/src/lib_json/json_tool.h +++ b/trunk/jsoncpp/src/lib_json/json_tool.h @@ -1,88 +1,93 @@ -#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED -# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED - -/* This header provides common string manipulation support, such as UTF-8, - * portable conversion from/to string... - * - * It is an internal header that must not be exposed. - */ - -namespace Json { - -/// Converts a unicode code-point to UTF-8. 
-static inline std::string -codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -/// Returns true if ch is a control character (in range [0,32[). -static inline bool -isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - - -enum { - /// Constant that specify the size of the buffer that must be passed to uintToString. - uintToStringBufferSize = 3*sizeof(UInt)+1 -}; - -// Defines a char buffer for use with uintToString(). -typedef char UIntToStringBuffer[uintToStringBufferSize]; - - -/** Converts an unsigned integer to string. - * @param value Unsigned interger to convert to string - * @param current Input/Output string buffer. - * Must have at least uintToStringBufferSize chars free. - */ -static inline void -uintToString( UInt value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = char(value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -} // namespace Json { - -#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... + * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specify the size of the buffer that must be passed to uintToString. 
+ uintToStringBufferSize = 3*sizeof(UInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( UInt value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 60362ad..b257b45 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #include #include #include diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index 736e260..bd7c8d2 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + // included by json_value.cpp // everything is within Json namespace diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 92782db..7882acf 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #include #include "json_tool.h" #include diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp index a07d0fe..02e7b21 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC #include "jsontest.h" #include diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 8f0bd31..75c7f78 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #ifndef JSONTEST_H_INCLUDED # define JSONTEST_H_INCLUDED diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index b80776d..3e5b53d 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -1,3 +1,8 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + #include #include "jsontest.h" From 634119659c9d826224292e260b2256365994c372 Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 27 Apr 2010 16:37:50 +0000 Subject: [PATCH 151/268] - added unit test and roadmap for handling of escape sequence "\/" git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@151 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/roadmap.dox | 6 +++++- trunk/jsoncpp/test/data/test_string_03.expected | 1 + trunk/jsoncpp/test/data/test_string_03.json | 1 + 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 trunk/jsoncpp/test/data/test_string_03.expected create mode 100644 trunk/jsoncpp/test/data/test_string_03.json diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox index d38ebe8..1ec0ab6 100644 --- a/trunk/jsoncpp/doc/roadmap.dox +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -26,6 +26,10 @@ - Enforce only object or array as root element - Disable comment support - Get jsonchecker failing tests to pass in strict mode + \section ms_writer Writter control + Provides more control to determine how specific items are serialized when JSON allow choice: + - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". + - Optionally allow escaping of "/" using "\/". \section ms_separation Expose json reader/writer API that do not impose using Json::Value. Some typical use-case involve an application specific structure to/from a JSON document. - Event base parser to allow unserializing a Json document directly in datastructure instead of @@ -35,7 +39,7 @@ - Provides an event based parser. Should allow pulling & skipping events for ease of use. - Provides a JSON document builder: fast only. 
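The writer-side escaping listed above is still a roadmap item, but the reader side is already covered by the test pair test_string_03 added below: the escape sequence "\/" must decode to a plain '/'. A minimal sketch of that expectation (the helper name and the wrapping array are illustrative only):

   #include <json/json.h>
   #include <cassert>

   // Sketch only: mirrors test_string_03 -- "\/" in the input decodes to '/'.
   void checkSolidusEscape()
   {
      Json::Reader reader;
      Json::Value root;
      if ( reader.parse( "[\"http:\\/\\/jsoncpp.sourceforge.net\\/\"]", root ) )
         assert( root[0u].asString() == "http://jsoncpp.sourceforge.net/" );
   }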
\section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation + - Provides support for static property name definition avoiding allocation - Static property dictionnary can be provided to JSON reader - Performance scenario & benchmarking */ diff --git a/trunk/jsoncpp/test/data/test_string_03.expected b/trunk/jsoncpp/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/trunk/jsoncpp/test/data/test_string_03.json b/trunk/jsoncpp/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" From c440cdad947437d0a2f991a2602ed23f165b45ac Mon Sep 17 00:00:00 2001 From: blep Date: Tue, 27 Apr 2010 16:38:30 +0000 Subject: [PATCH 152/268] updated license with clearer information git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@152 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/LICENSE | 36 +++++++++++++++++------------------- trunk/jsoncpp/NEWS.txt | 7 +++++++ trunk/jsoncpp/README.txt | 4 ++++ 3 files changed, 28 insertions(+), 19 deletions(-) diff --git a/trunk/jsoncpp/LICENSE b/trunk/jsoncpp/LICENSE index 7ba548c..ca2bfe1 100644 --- a/trunk/jsoncpp/LICENSE +++ b/trunk/jsoncpp/LICENSE @@ -1,28 +1,26 @@ -This is the LICENSE file for JsonCpp, a C++ library implementing a -JSON format reader and writer. +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... -Author: Baptiste Lepilleur +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. -The license for this library's code is as follows: +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). - - If the code is used in a jurisdiction where Public Domain - property is regonized, then this code may be considered to be - in the Public Domain. Its author expressly disclaims copyright - in jurisdictions where such a disclaimer is allowed. +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. - - If the code is used in a jurisdiction which does not recognize - Public Domain, the code must be used in terms with the MIT license, - as described clearly and concisely at: +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: http://en.wikipedia.org/wiki/MIT_License - - and reproduced in full below. - - - If the code is used in a jurisdiction which recognizes Public - Domain, the user may instead use the code under the terms of the - MIT license. 
- - The MIT licensing terms follow: + +The full text of the MIT License follows: ======================================================================== Copyright (c) 2007-2010 Baptiste Lepilleur diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 63b43b1..7c39c81 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -33,7 +33,14 @@ - The type Json::ArrayIndex is used for indexes of a JSON value array. It is an unsigned int (typically 32 bits). +* Tests + + - Added test to ensure that the escape sequence "\/" is corrected handled + by the parser. + * License - See file LICENSE for details. Basically JsonCpp is now licensed under MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who + helped figuring out the solution to the public domain issue. diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 5c3b334..dc4e341 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -79,6 +79,10 @@ python doxybuild.py --open --with-dot See doxybuild.py --help for options. +Notes that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + * Adding a reader/writer test: ============================ From f0e7ce8cd7c3999207cfa04d2fcdafb6a5bbf097 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 24 Dec 2010 12:47:14 +0000 Subject: [PATCH 153/268] - Array index can be passed as int to operator[], allowing use of literal: Json::Value array; array.append( 1234 ); int value = array[0].asInt(); // did not compile previously git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@153 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 5 +++++ trunk/jsoncpp/include/json/value.h | 18 ++++++++++++++++-- trunk/jsoncpp/src/jsontestrunner/main.cpp | 4 ++++ trunk/jsoncpp/src/lib_json/json_value.cpp | 16 ++++++++++++++++ trunk/jsoncpp/src/test_lib_json/jsontest.h | 2 +- trunk/jsoncpp/src/test_lib_json/main.cpp | 14 ++++++++++++++ 6 files changed, 56 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 7c39c81..250b730 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -32,6 +32,11 @@ - The type Json::ArrayIndex is used for indexes of a JSON value array. It is an unsigned int (typically 32 bits). + + - Array index can be passed as int to operator[], allowing use of literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously * Tests diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 4de2c26..e9632a2 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -284,11 +284,25 @@ namespace Json { /// (You may need to say 'value[0u]' to get your compiler to distinguish /// this from the operator[] which takes a string.) Value &operator[]( ArrayIndex index ); - /// Access an array element (zero based index ) + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
+ Value &operator[]( int index ); + + /// Access an array element (zero based index ) /// (You may need to say 'value[0u]' to get your compiler to distinguish /// this from the operator[] which takes a string.) const Value &operator[]( ArrayIndex index ) const; - /// If the array contains at least index+1 elements, returns the element value, + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, /// otherwise returns defaultValue. Value get( ArrayIndex index, const Value &defaultValue ) const; diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index be3f44c..67344e0 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -3,6 +3,10 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE +/* This executable is used for testing parser/writer using real JSON files. + */ + + #include #include // sort #include diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index b257b45..a7b7328 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -975,6 +975,14 @@ Value::operator[]( ArrayIndex index ) } +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + const Value & Value::operator[]( ArrayIndex index ) const { @@ -994,6 +1002,14 @@ Value::operator[]( ArrayIndex index ) const } +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + Value & Value::operator[]( const char *key ) { diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 75c7f78..0d07238 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -199,7 +199,7 @@ namespace JsonTest { /// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; /// JSONTEST_ASSERT( x == y ); #define JSONTEST_ASSERT( expr ) \ - if ( condition ) \ + if ( expr ) \ { \ } \ else \ diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 3e5b53d..30d039c 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -172,6 +172,18 @@ JSONTEST_FIXTURE( ValueTest, isUInt ) } +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + void ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) { @@ -245,5 +257,7 @@ int main( int argc, const char *argv[] ) JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( 
runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); return runner.runCommandLine( argc, argv ); } From a0c689528108c11ac7769246168a3af6a5c44da0 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 24 Dec 2010 19:30:06 +0000 Subject: [PATCH 154/268] Added float Json::Value::asFloat() to obtain a floating point value as a float (avoid lost of precision warning caused by used of asDouble() to initialize a float). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@154 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 4 ++++ trunk/jsoncpp/include/json/value.h | 1 + trunk/jsoncpp/src/lib_json/json_value.cpp | 29 +++++++++++++++++++++++ trunk/jsoncpp/src/test_lib_json/main.cpp | 8 +++++++ 4 files changed, 42 insertions(+) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 250b730..ded333f 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -38,6 +38,10 @@ array.append( 1234 ); int value = array[0].asInt(); // did not compile previously + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoid lost of precision warning caused by used of asDouble() + to initialize a float). + * Tests - Added test to ensure that the escape sequence "\/" is corrected handled diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index e9632a2..8d0d4c1 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -240,6 +240,7 @@ namespace Json { # endif Int asInt() const; UInt asUInt() const; + float asFloat() const; double asDouble() const; bool asBool() const; diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index a7b7328..15a1140 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -772,6 +772,35 @@ Value::asDouble() const return 0; // unreachable; } +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + bool Value::asBool() const { diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 30d039c..de64200 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -28,6 +28,7 @@ struct ValueTest : JsonTest::TestCase Json::Value unsignedInteger_; Json::Value smallUnsignedInteger_; Json::Value real_; + Json::Value float_; Json::Value array1_; Json::Value object1_; Json::Value emptyString_; @@ -43,6 +44,7 @@ struct ValueTest : JsonTest::TestCase , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) , unsignedInteger_( 34567890u ) , real_( 1234.56789 ) + , float_( 0.00390625f ) , emptyString_( "" ) , string1_( "a" ) , string_( "sometext with space" ) @@ -184,6 +186,11 @@ JSONTEST_FIXTURE( ValueTest, accessArray ) } +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + void ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) { @@ -259,5 +266,6 @@ int main( int argc, const char *argv[] ) JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); return runner.runCommandLine( argc, argv ); } From 43f2329afb44890bc7144ea30a083b8bfbd98a19 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 24 Dec 2010 19:58:23 +0000 Subject: [PATCH 155/268] Fixed some documentation issues pointed out by Daniel. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@155 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/header.html | 2 +- trunk/jsoncpp/doc/jsoncpp.dox | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/doc/header.html b/trunk/jsoncpp/doc/header.html index d56ea59..1a6ad61 100644 --- a/trunk/jsoncpp/doc/header.html +++ b/trunk/jsoncpp/doc/header.html @@ -11,7 +11,7 @@ diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 05f23b5..4e05a28 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -22,7 +22,7 @@ Here is an example of JSON data: ], // Tab indent size - "indent" : { "length" : 3, "use_space" = true } + "indent" : { "length" : 3, "use_space": true } } \endverbatim @@ -109,7 +109,7 @@ Permanent link to the latest revision of the file in subversion: \section _plinks Project links - json-cpp home -- json-cpp sourceforge project +- json-cpp sourceforge project \section _rlinks Related links - JSON Specification and alternate language implementations. From 4651156026a1aa533b1e8b0ac1f42dfa47ac6c37 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 27 Dec 2010 17:45:23 +0000 Subject: [PATCH 156/268] Major rework of 64 integer support: 64 bits integer are only returned when explicitly request via Json::Value::asInt64(), unlike previous implementation where Json::Value::asInt() returned a 64 bits integer. This eases porting portable code and does not break compatibility with the previous release. Json::Value::asLargestInt() has also be added to ease writing portable code independent of 64 bits integer support. It is typically used to implement writers. 
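In code, the distinction described above looks roughly like this (a sketch against the post-rework API; the exact failure mode of asInt() depends on how JSON_ASSERT is configured):

   #include <json/json.h>

   // Sketch only: explicit 64-bit access versus the plain int accessor.
   void illustrateInt64Access()
   {
      // A value that does not fit in a plain 32-bit int.
      Json::Value big( Json::Value::Int64(1) << 40 );

      Json::Value::Int64 wide = big.asInt64();                // explicit 64-bit access
      Json::Value::LargestInt portable = big.asLargestInt();  // widest supported type
      (void)wide; (void)portable;

      // big.asInt() would trip the new out-of-range assertion, because the
      // stored value exceeds Value::maxInt.
   }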
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@156 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 35 +++++-- trunk/jsoncpp/include/json/config.h | 18 ++-- trunk/jsoncpp/include/json/value.h | 41 ++++++-- trunk/jsoncpp/include/json/writer.h | 4 + trunk/jsoncpp/src/jsontestrunner/main.cpp | 60 ++++++----- trunk/jsoncpp/src/lib_json/json_reader.cpp | 16 +-- trunk/jsoncpp/src/lib_json/json_tool.h | 4 +- trunk/jsoncpp/src/lib_json/json_value.cpp | 116 +++++++++++++++++++-- trunk/jsoncpp/src/lib_json/json_writer.cpp | 35 +++++-- 9 files changed, 253 insertions(+), 76 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index ded333f..9924691 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -18,17 +18,38 @@ initialization/destruction order issues (bug #2934500). The DefaultValueAllocator has been inlined in code. - - Added support for 64 bits integer. Json::Int and Json::UInt are - now 64 bits integers on system that support them (more precisely - they are of the size of long long, so if it is 128 bits it will - also work). + - Added support for 64 bits integer: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64 bits integers on system that support them (based on __int64 on + Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still returns plain + "int" based types, but asserts if an attempt is made to retrieve + a 64 bits value that can not represented as the return type. + + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64 bits integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns + the integer as a LargestInt/LargestUInt respectively. Those functions + functions are typically used when implementing writer. + + The reader attempts to read number as 64 bits integer, and fall back + to reading a double if the number is not in the range of 64 bits + integer. Warning: Json::Value::asInt() and Json::Value::asUInt() now returns long long. This changes break code that was passing the return value to *printf() function. - - Notes: you can switch back to the 32 bits only behavior by defining the - macro JSON_NO_INT64 (se include/json/config.h). + + Support for 64 bits integer can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. - The type Json::ArrayIndex is used for indexes of a JSON value array. It is an unsigned int (typically 32 bits). diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 3fe08f2..55f0583 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -46,7 +46,7 @@ # endif // If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer -// Storages. +// Storages, and 64 bits integer support is disabled. 
// #define JSON_NO_INT64 1 #if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 @@ -57,18 +57,24 @@ namespace Json { -# if defined(JSON_NO_INT64) typedef int Int; typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 # else // if defined(JSON_NO_INT64) // For Microsoft Visual use specific types as long long is not supported # if defined(_MSC_VER) // Microsoft Visual Studio - typedef __int64 Int; - typedef unsigned __int64 UInt; + typedef __int64 Int64; + typedef unsigned __int64 UInt64; # else // if defined(_MSC_VER) // Other platforms, use long long - typedef long long int Int; - typedef unsigned long long int UInt; + typedef long long int Int64; + typedef unsigned long long int UInt64; # endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 # endif // if defined(JSON_NO_INT64) } // end namespace Json diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 8d0d4c1..14464e4 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -126,13 +126,36 @@ namespace Json { typedef ValueConstIterator const_iterator; typedef Json::UInt UInt; typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; typedef Json::ArrayIndex ArrayIndex; static const Value null; - static const Int minInt; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. static const UInt maxUInt; + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. 
+ static const UInt64 maxUInt64; + private: #ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION # ifndef JSON_VALUE_USE_INTERNAL_MAP @@ -187,12 +210,12 @@ namespace Json { \endcode */ Value( ValueType type = nullValue ); -#if !defined(JSON_NO_INT64) - Value( int value ); - Value( ArrayIndex value ); -#endif // if !defined(JSON_NO_INT64) Value( Int value ); Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) Value( double value ); Value( const char *value ); Value( const char *beginValue, const char *endValue ); @@ -240,6 +263,10 @@ namespace Json { # endif Int asInt() const; UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; float asFloat() const; double asDouble() const; bool asBool() const; @@ -448,8 +475,8 @@ namespace Json { union ValueHolder { - Int int_; - UInt uint_; + LargestInt int_; + LargestUInt uint_; double real_; bool bool_; char *string_; diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 4d74f93..2ee13de 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -162,8 +162,12 @@ namespace Json { bool addChildValues_; }; +# if defined(JSON_HAS_INT64) std::string JSON_API valueToString( Int value ); std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); std::string JSON_API valueToString( double value ); std::string JSON_API valueToString( bool value ); std::string JSON_API valueToQuotedString( const char *value ); diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 67344e0..2da3ede 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -44,10 +44,10 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) fprintf( fout, "%s=null\n", path.c_str() ); break; case Json::intValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asInt() ).c_str() ); + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); break; case Json::uintValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asUInt() ).c_str() ); + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); break; case Json::realValue: fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); @@ -224,37 +224,45 @@ int main( int argc, const char *argv[] ) return exitCode; } - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) + try { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } } } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } return exitCode; } diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index d2c255c..60dc4c9 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -567,12 +567,12 @@ Reader::decodeNumber( Token &token ) bool isNegative = *current == '-'; if ( isNegative ) ++current; - Value::UInt maxIntegerValue = isNegative ? Value::UInt(-Value::minInt) - : Value::maxUInt; - Value::UInt threshold = maxIntegerValue / 10; - Value::UInt lastDigitThreshold = maxIntegerValue % 10; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); - Value::UInt value = 0; + Value::LargestUInt value = 0; while ( current < token.end_ ) { Char c = *current++; @@ -592,9 +592,9 @@ Reader::decodeNumber( Token &token ) value = value * 10 + digit; } if ( isNegative ) - currentValue() = -Value::Int( value ); - else if ( value <= Value::UInt(Value::maxInt) ) - currentValue() = Value::Int( value ); + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); else currentValue() = value; return true; diff --git a/trunk/jsoncpp/src/lib_json/json_tool.h b/trunk/jsoncpp/src/lib_json/json_tool.h index c20639d..658031b 100644 --- a/trunk/jsoncpp/src/lib_json/json_tool.h +++ b/trunk/jsoncpp/src/lib_json/json_tool.h @@ -63,7 +63,7 @@ isControlCharacter(char ch) enum { /// Constant that specify the size of the buffer that must be passed to uintToString. - uintToStringBufferSize = 3*sizeof(UInt)+1 + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 }; // Defines a char buffer for use with uintToString(). @@ -76,7 +76,7 @@ typedef char UIntToStringBuffer[uintToStringBufferSize]; * Must have at least uintToStringBufferSize chars free. 
*/ static inline void -uintToString( UInt value, +uintToString( LargestUInt value, char *¤t ) { *--current = 0; diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 15a1140..218c127 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -28,6 +28,13 @@ const Value Value::null; const Int Value::minInt = Int( ~(UInt(-1)/2) ); const Int Value::maxInt = Int( UInt(-1)/2 ); const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + /// Unknown size marker enum { unknown = (unsigned)-1 }; @@ -262,8 +269,8 @@ Value::Value( ValueType type ) } -#if !defined(JSON_NO_INT64) -Value::Value( ArrayIndex value ) +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) : type_( uintValue ) , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -273,7 +280,7 @@ Value::Value( ArrayIndex value ) value_.uint_ = value; } -Value::Value( int value ) +Value::Value( Int value ) : type_( intValue ) , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -283,10 +290,10 @@ Value::Value( int value ) value_.int_ = value; } -#endif // if !defined(JSON_NO_INT64) +#endif // if defined(JSON_HAS_INT64) -Value::Value( Int value ) +Value::Value( Int64 value ) : type_( intValue ) , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -297,7 +304,7 @@ Value::Value( Int value ) } -Value::Value( UInt value ) +Value::Value( UInt64 value ) : type_( uintValue ) , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -689,6 +696,7 @@ Value::asConstString() const } # endif + Value::Int Value::asInt() const { @@ -697,10 +705,11 @@ Value::asInt() const case nullValue: return 0; case intValue: - return value_.int_; + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ < (unsigned)maxInt, "integer out of signed integer range" ); - return value_.uint_; + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); case realValue: JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); return Int( value_.real_ ); @@ -716,6 +725,7 @@ Value::asInt() const return 0; // unreachable; } + Value::UInt Value::asUInt() const { @@ -725,9 +735,11 @@ Value::asUInt() const return 0; case intValue: JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - return value_.int_; + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); case uintValue: - return value_.uint_; + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); case realValue: JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); return UInt( value_.real_ ); @@ -743,6 +755,88 @@ Value::asUInt() const return 0; // unreachable; } + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return 
value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + double Value::asDouble() const { diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 7882acf..f101cbc 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -29,14 +29,15 @@ static bool containsControlCharacter( const char* str ) return false; } -std::string valueToString( Int value ) + +std::string valueToString( LargestInt value ) { UIntToStringBuffer buffer; char *current = buffer + sizeof(buffer); bool isNegative = value < 0; if ( isNegative ) value = -value; - uintToString( UInt(value), current ); + uintToString( LargestUInt(value), current ); if ( isNegative ) *--current = '-'; assert( current >= buffer ); @@ -44,7 +45,7 @@ std::string valueToString( Int value ) } -std::string valueToString( UInt value ) +std::string valueToString( LargestUInt value ) { UIntToStringBuffer buffer; char *current = buffer + sizeof(buffer); @@ -53,6 +54,22 @@ std::string valueToString( UInt value ) return current; } +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + std::string valueToString( double value ) { char buffer[32]; @@ -203,10 +220,10 @@ FastWriter::writeValue( const Value &value ) document_ += "null"; break; case intValue: - document_ += valueToString( value.asInt() ); + document_ += valueToString( value.asLargestInt() ); break; case uintValue: - document_ += valueToString( value.asUInt() ); + document_ += valueToString( value.asLargestUInt() ); break; case realValue: document_ += valueToString( value.asDouble() ); @@ -286,10 +303,10 @@ StyledWriter::writeValue( const Value &value ) pushValue( "null" ); break; case intValue: - pushValue( valueToString( value.asInt() ) ); + pushValue( valueToString( value.asLargestInt() ) ); break; case uintValue: - pushValue( 
valueToString( value.asUInt() ) ); + pushValue( valueToString( value.asLargestUInt() ) ); break; case realValue: pushValue( valueToString( value.asDouble() ) ); @@ -562,10 +579,10 @@ StyledStreamWriter::writeValue( const Value &value ) pushValue( "null" ); break; case intValue: - pushValue( valueToString( value.asInt() ) ); + pushValue( valueToString( value.asLargestInt() ) ); break; case uintValue: - pushValue( valueToString( value.asUInt() ) ); + pushValue( valueToString( value.asLargestUInt() ) ); break; case realValue: pushValue( valueToString( value.asDouble() ) ); From d8e1a9679bfc8d8dd90bbc41d4819b4acb663e66 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 15:06:40 +0000 Subject: [PATCH 157/268] Fixed url for scons 1.2 download. Clarify manual test run executable path. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@157 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index dc4e341..2dcbd8b 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -21,7 +21,7 @@ JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires python to be installed (http://www.python.org). You download scons-local distribution from the following url: -http://sourceforge.net/project/showfiles.php?group_id=30337&package_id=67375 +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ Unzip it in the directory where you found this README file. scons.py Should be at the same level as README. @@ -52,6 +52,13 @@ and TARGET may be: * Running the test manually: ========================== +Notes that test can be run by scons using the 'check' target (see above). + +You need to run test manually only if you are troubleshooting an issue. + +In the instruction below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. + cd test # This will run the Reader/Writer tests python runjsontests.py "path to jsontest.exe" From baf597481c46516fb6aefefb214345f61f16981e Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 15:09:16 +0000 Subject: [PATCH 158/268] Fixed latest readme.txt url. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@158 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/jsoncpp.dox | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 4e05a28..04fc165 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -87,7 +87,7 @@ The build instructions are located in the file README.txt in the top-directory of the project. Permanent link to the latest revision of the file in subversion: -latest README.txt +latest README.txt \section _pdownload Download The sources can be downloaded from From 22ca28ef7eac116596e8527e3fe5d980bb0f85f6 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 15:40:47 +0000 Subject: [PATCH 159/268] Fixed bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now correctly detected. Modified runjsontests.py to allow test that expect failure in jsoncpp test suite. 
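To illustrate the corrected behaviour (a minimal sketch, not shipped with this change; the input mirrors the new fail_test_array_01.json test case), a missing ',' between array elements now makes Reader::parse() return false instead of silently dropping an element:

#include <json/json.h>
#include <cassert>
#include <string>

int main()
{
   Json::Reader reader;
   Json::Value root;
   const std::string malformed = "[ 1 2 3 ]";        // no separators between elements
   bool ok = reader.parse( malformed, root, false );
   assert( !ok );   // before this fix, parsing "succeeded" and produced [1, 3]
   return 0;
}
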
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@159 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 5 +++++ trunk/jsoncpp/src/lib_json/json_reader.cpp | 4 ++-- trunk/jsoncpp/test/data/fail_test_array_01.json | 1 + trunk/jsoncpp/test/runjsontests.py | 4 ++-- 4 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 trunk/jsoncpp/test/data/fail_test_array_01.json diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 9924691..ce1d014 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -68,6 +68,11 @@ - Added test to ensure that the escape sequence "\/" is corrected handled by the parser. +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now correctly + detected. + * License - See file LICENSE for details. Basically JsonCpp is now licensed under diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 60dc4c9..508eb16 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -533,8 +533,8 @@ Reader::readArray( Token &tokenStart ) { ok = readToken( token ); } - bool badTokenType = ( token.type_ == tokenArraySeparator && - token.type_ == tokenArrayEnd ); + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); if ( !ok || badTokenType ) { return addErrorAndRecover( "Missing ',' or ']' in array declaration", diff --git a/trunk/jsoncpp/test/data/fail_test_array_01.json b/trunk/jsoncpp/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/trunk/jsoncpp/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/trunk/jsoncpp/test/runjsontests.py b/trunk/jsoncpp/test/runjsontests.py index 800337d..ffe8bd5 100644 --- a/trunk/jsoncpp/test/runjsontests.py +++ b/trunk/jsoncpp/test/runjsontests.py @@ -49,7 +49,8 @@ def runAllTests( jsontest_executable_path, input_dir = None, failed_tests = [] valgrind_path = use_valgrind and VALGRIND_CMD or '' for input_path in tests + test_jsonchecker: - is_json_checker_test = input_path in test_jsonchecker + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure print 'TESTING:', input_path, options = is_json_checker_test and '--json-checker' or '' pipe = os.popen( "%s%s %s %s" % ( @@ -58,7 +59,6 @@ def runAllTests( jsontest_executable_path, input_dir = None, process_output = pipe.read() status = pipe.close() if is_json_checker_test: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) if expect_failure: if status is None: print 'FAILED' From 04658ddfb9850c2173509a066340030987d14d26 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 15:47:38 +0000 Subject: [PATCH 160/268] Fixed bug #3139678: stack buffer overflow when parsing a double with a length of 32 characters. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@160 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 12 ++++++++---- trunk/jsoncpp/src/lib_json/json_reader.cpp | 2 +- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index ce1d014..b53f4db 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -3,8 +3,9 @@ * Compilation - - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now propagated to the build - environment as this is required for some compiler installation. 
+ - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installation. - Added support for Microsoft Visual Studio 2008 (bug #2930462): The platform "msvc90" has been added. @@ -70,8 +71,11 @@ * Bug fixes - - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now correctly - detected. + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. * License diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 508eb16..8bc75e3 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -610,7 +610,7 @@ Reader::decodeDouble( Token &token ) int length = int(token.end_ - token.start_); if ( length <= bufferSize ) { - Char buffer[bufferSize]; + Char buffer[bufferSize+1]; memcpy( buffer, token.start_, length ); buffer[length] = 0; count = sscanf( buffer, "%lf", &value ); From b4cc88023c98af5503c2566b5883877a191ad926 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 16:27:55 +0000 Subject: [PATCH 161/268] Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. Bug #3023708 (Formatted has 2 't'). The old member function is deprecated but still present for backward compatibility. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@161 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 8 +++++++- trunk/jsoncpp/doc/jsoncpp.dox | 2 +- trunk/jsoncpp/include/json/config.h | 8 ++++++++ trunk/jsoncpp/include/json/reader.h | 9 +++++++++ trunk/jsoncpp/src/jsontestrunner/main.cpp | 2 +- trunk/jsoncpp/src/lib_json/json_reader.cpp | 10 +++++++++- trunk/jsoncpp/src/lib_json/json_value.cpp | 3 ++- 7 files changed, 37 insertions(+), 5 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index b53f4db..3f75156 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -64,6 +64,12 @@ float (avoid lost of precision warning caused by used of asDouble() to initialize a float). +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + * Tests - Added test to ensure that the escape sequence "\/" is corrected handled @@ -76,7 +82,7 @@ - Bug #3139678: stack buffer overflow when parsing a double with a length of 32 characters. - + * License - See file LICENSE for details. Basically JsonCpp is now licensed under diff --git a/trunk/jsoncpp/doc/jsoncpp.dox b/trunk/jsoncpp/doc/jsoncpp.dox index 04fc165..97cc108 100644 --- a/trunk/jsoncpp/doc/jsoncpp.dox +++ b/trunk/jsoncpp/doc/jsoncpp.dox @@ -46,7 +46,7 @@ if ( !parsingSuccessful ) { // report to the user the failure and their locations in the document. std::cout << "Failed to parse configuration\n" - << reader.getFormatedErrorMessages(); + << reader.getFormattedErrorMessages(); return; } diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 55f0583..8e69b7a 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -55,6 +55,14 @@ #define JSON_USE_INT64_DOUBLE_CONVERSION 1 #endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. 
+# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) namespace Json { typedef int Int; diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 62232ea..2cd94eb 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -72,9 +72,18 @@ namespace Json { * \return Formatted error message with the list of errors with their location in * the parsed document. An empty string is returned if no error occurred * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") std::string getFormatedErrorMessages() const; + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + */ + std::string getFormattedErrorMessages() const; + private: enum TokenType { diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index 2da3ede..dfb6150 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -105,7 +105,7 @@ parseAndSaveValueTree( const std::string &input, { printf( "Failed to parse %s file: \n%s\n", kind.c_str(), - reader.getFormatedErrorMessages().c_str() ); + reader.getFormattedErrorMessages().c_str() ); return 1; } diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 8bc75e3..0d59c46 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -839,8 +839,16 @@ Reader::getLocationLineAndColumn( Location location ) const } +// Deprecated. Preserved for backward compatibility std::string Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const { std::string formattedMessage; for ( Errors::const_iterator itError = errors_.begin(); @@ -862,7 +870,7 @@ std::istream& operator>>( std::istream &sin, Value &root ) Json::Reader reader; bool ok = reader.parse(sin, root, true); //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormatedErrorMessages()); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); return sin; } diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 218c127..ce1dec3 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -54,6 +54,7 @@ duplicateStringValue( const char *value, if ( length == unknown ) length = (unsigned int)strlen(value); char *newString = static_cast( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); memcpy( newString, value, length ); newString[length] = 0; return newString; @@ -112,7 +113,7 @@ Value::CommentInfo::setComment( const char *text ) { if ( comment_ ) releaseStringValue( comment_ ); - JSON_ASSERT( text ); + JSON_ASSERT( text != 0 ); JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); // It seems that /**/ style comments are acceptable as well. 
comment_ = duplicateStringValue( text ); From f9eeb839831cc80543ac4c4f957e13a5d6e71934 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 16:42:18 +0000 Subject: [PATCH 162/268] Added known bug reference for experimental internal map. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@162 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/config.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 8e69b7a..6344729 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -16,7 +16,7 @@ //# define JSON_USE_CPPTL_SMALLMAP 1 /// If defined, indicates that Json specific container should be used /// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 //# define JSON_VALUE_USE_INTERNAL_MAP 1 /// Force usage of standard new/malloc based allocator instead of memory pool based allocator. /// The memory pools allocator used optimization (initializing Value and ValueInternalLink From 0d6a8cf7f820c84da8b50708e2e0800b43cd9c3b Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 17:24:16 +0000 Subject: [PATCH 163/268] Added recommended include path. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@163 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 2dcbd8b..0c351c3 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -91,6 +91,13 @@ The documentation of the latest release is available online at: http://jsoncpp.sourceforge.net/ +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path. jsoncpp headers should be included as follow: +#include + + * Adding a reader/writer test: ============================ From 2f2426ca05924c9c2e6fefe9dff9501de045560d Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 18:33:46 +0000 Subject: [PATCH 164/268] Added project URL. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@164 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 0c351c3..8b0efa2 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -5,8 +5,8 @@ JSON (JavaScript Object Notation) is a lightweight data-interchange format. It can represent integer, real number, string, an ordered sequence of value, and a collection of name/value pairs. -JsonCpp is a simple API to manipulate JSON value, handle serialization -and unserialization to string. +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON value, handle serialization and unserialization to string. It can also preserve existing comment in unserialization/serialization steps, making it a convenient format to store user input files. From 2ddb57a6045623bf4e806591aa67239266172338 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 20:13:40 +0000 Subject: [PATCH 165/268] Added support for amalgated source and header generation (a la sqlite). Refer to README.txt section "Generating amalgated source and header" for detail. 
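In a client project the generated files are used roughly as follows (a hedged sketch; the file names are the defaults produced by the script and the sample program is illustrative only): dist/jsoncpp.cpp is compiled together with the project's own sources, and the single header is included in place of the individual JsonCpp headers:

// example.cpp -- illustrative client code, not part of JsonCpp
#include "json/json.h"   // single amalgated header copied from dist/
#include <string>

int main()
{
   Json::Value root;
   root["built-from"] = "amalgated sources";
   Json::FastWriter writer;
   std::string text = writer.write( root );   // {"built-from":"amalgated sources"}
   return text.empty() ? 1 : 0;
}
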
The amalgated sources are generated by concatenating JsonCpp source in the correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of other headers. Sources and header has been modified to prevent any inclusion when this macro is defined. The script amalgate.py handle the generation. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@165 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 4 +++ trunk/jsoncpp/README.txt | 28 ++++++++++++++++++- trunk/jsoncpp/include/json/config.h | 6 ++++ trunk/jsoncpp/include/json/features.h | 2 ++ trunk/jsoncpp/include/json/forwards.h | 2 ++ trunk/jsoncpp/include/json/reader.h | 2 ++ trunk/jsoncpp/include/json/value.h | 2 ++ trunk/jsoncpp/include/json/writer.h | 2 ++ .../src/lib_json/json_internalarray.inl | 5 +++- .../jsoncpp/src/lib_json/json_internalmap.inl | 5 +++- trunk/jsoncpp/src/lib_json/json_reader.cpp | 8 ++++-- trunk/jsoncpp/src/lib_json/json_value.cpp | 24 ++++++++++------ .../src/lib_json/json_valueiterator.inl | 4 ++- trunk/jsoncpp/src/lib_json/json_writer.cpp | 6 ++-- 14 files changed, 82 insertions(+), 18 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 3f75156..7978c0a 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -13,6 +13,10 @@ Notes: you need to setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt in start menu) before running scons. + - Added support for amalgated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgated source and header" + for detail. + * Value - Removed experimental ValueAllocator, it caused static diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 8b0efa2..ba70329 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -90,11 +90,37 @@ Notes that the documentation is also available for download as a tarball. The documentation of the latest release is available online at: http://jsoncpp.sourceforge.net/ +* Generating amalgated source and header + ====================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgated source can be generated at any time by running the following +command from the top-directory (requires python 2.6): + +python amalgate.py + +It is possible to specify header name. See -h options for detail. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that need to be added to your project +- dist/json/json.h: header file corresponding to use in your project. It is +equivalent to including json/json.h in non-amalgated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header the provides forward declaration +of all JsonCpp types. This typically what should be included in headers to +speed-up compilation. + +The amalgated sources are generated by concatenating JsonCpp source in the +correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of +other headers. * Using json-cpp in your project: =============================== -include/ should be added to your compiler include path. jsoncpp headers should be included as follow: +include/ should be added to your compiler include path. 
jsoncpp headers +should be included as follow: + #include diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 6344729..24991d5 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -28,6 +28,12 @@ /// instead of C assert macro. # define JSON_USE_EXCEPTION 1 +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGATED + + # ifdef JSON_IN_CPPTL # include # ifndef JSON_USE_CPPTL diff --git a/trunk/jsoncpp/include/json/features.h b/trunk/jsoncpp/include/json/features.h index fd8e350..0b53db1 100644 --- a/trunk/jsoncpp/include/json/features.h +++ b/trunk/jsoncpp/include/json/features.h @@ -6,7 +6,9 @@ #ifndef CPPTL_JSON_FEATURES_H_INCLUDED # define CPPTL_JSON_FEATURES_H_INCLUDED +#if !defined(JSON_IS_AMALGATED) # include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) namespace Json { diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index dd32fa0..083d44f 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -6,7 +6,9 @@ #ifndef JSON_FORWARDS_H_INCLUDED # define JSON_FORWARDS_H_INCLUDED +#if !defined(JSON_IS_AMALGATED) # include "config.h" +#endif // if !defined(JSON_IS_AMALGATED) namespace Json { diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 2cd94eb..13de15c 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -6,8 +6,10 @@ #ifndef CPPTL_JSON_READER_H_INCLUDED # define CPPTL_JSON_READER_H_INCLUDED +#if !defined(JSON_IS_AMALGATED) # include "features.h" # include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) # include # include # include diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 14464e4..66821ab 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -6,7 +6,9 @@ #ifndef CPPTL_JSON_H_INCLUDED # define CPPTL_JSON_H_INCLUDED +#if !defined(JSON_IS_AMALGATED) # include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) # include # include diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 2ee13de..cb0bd9b 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -6,7 +6,9 @@ #ifndef JSON_WRITER_H_INCLUDED # define JSON_WRITER_H_INCLUDED +#if !defined(JSON_IS_AMALGATED) # include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) # include # include # include diff --git a/trunk/jsoncpp/src/lib_json/json_internalarray.inl b/trunk/jsoncpp/src/lib_json/json_internalarray.inl index 66d838e..3a532ad 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalarray.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalarray.inl @@ -4,7 +4,8 @@ // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE // included by json_value.cpp -// everything is within Json namespace + +namespace Json { // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// @@ -451,3 +452,5 @@ ValueInternalArray::compare( const ValueInternalArray &other ) const } return 0; } + +} // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/json_internalmap.inl b/trunk/jsoncpp/src/lib_json/json_internalmap.inl index d0dd62a..f2fa160 100644 --- 
a/trunk/jsoncpp/src/lib_json/json_internalmap.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalmap.inl @@ -4,7 +4,8 @@ // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE // included by json_value.cpp -// everything is within Json namespace + +namespace Json { // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// @@ -610,3 +611,5 @@ ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) increment( it ); return offset; } + +} // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 0d59c46..7c594e2 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -3,9 +3,11 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE -#include -#include -#include "json_tool.h" +#if !defined(JSON_IS_AMALGATED) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) #include #include #include diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index ce1dec3..c810417 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -3,9 +3,14 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE +#if !defined(JSON_IS_AMALGATED) +# include +# include +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGATED) #include -#include -#include #include #include #include @@ -14,9 +19,6 @@ # include #endif #include // size_t -#ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -#endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR #define JSON_ASSERT_UNREACHABLE assert( false ) #define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw @@ -70,6 +72,7 @@ releaseStringValue( char *value ) free( value ); } +} // namespace Json // ////////////////////////////////////////////////////////////////// @@ -79,13 +82,16 @@ releaseStringValue( char *value ) // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -#ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -#endif // JSON_VALUE_USE_INTERNAL_MAP +#if !defined(JSON_IS_AMALGATED) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP # include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGATED) +namespace Json { // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index bd7c8d2..7457ca3 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -4,8 +4,8 @@ // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE // included by json_value.cpp -// everything is within Json namespace +namespace Json { // 
////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// @@ -295,3 +295,5 @@ ValueIterator::operator =( const SelfType &other ) copy( other ); return *this; } + +} // namespace Json diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index f101cbc..8c4c180 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -3,8 +3,10 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE -#include -#include "json_tool.h" +#if !defined(JSON_IS_AMALGATED) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) #include #include #include From 9d49f488a3fbcba5456ac03ba48a79e76f9a083e Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 20:36:55 +0000 Subject: [PATCH 166/268] Release test-0.6.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@166 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/makerelease.py | 11 ++++++++++- trunk/jsoncpp/version | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index 9b59180..a6e330e 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -6,7 +6,7 @@ python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev When testing this script: -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.5.0 test-0.6.0-dev +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev Example of invocation when doing a release: python makerelease.py 0.5.0 0.6.0-dev @@ -23,6 +23,7 @@ import os import time from devtools import antglob, fixeol, tarball +import amalgate SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' @@ -322,6 +323,14 @@ def main(): print 'Generating source tarball to', source_tarball_path tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir + print 'Generating amalgated source tarball to', amalgated_tarball_path + amalgated_dir = 'dist/amalgated' + amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) + amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version + tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], + amalgated_dir, prefix_dir=amalgated_source_dir ) + # Decompress source tarball, download and install scons-local distcheck_dir = 'dist/distcheck' distcheck_top_dir = distcheck_dir + '/' + source_dir diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version index 7defe1e..718dcfc 100644 --- a/trunk/jsoncpp/version +++ b/trunk/jsoncpp/version @@ -1 +1 @@ -0.6.0-dev \ No newline at end of file +test-0.6.0 \ No newline at end of file From 3fae23ddfc4cbe61c08b22ab0849dd8345853805 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 20:38:07 +0000 Subject: [PATCH 167/268] Release test-0.6.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@167 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/test-0.6.0/AUTHORS | 1 + tags/jsoncpp/test-0.6.0/LICENSE | 55 + tags/jsoncpp/test-0.6.0/NEWS.txt | 95 + tags/jsoncpp/test-0.6.0/README.txt | 172 ++ tags/jsoncpp/test-0.6.0/SConstruct | 248 ++ tags/jsoncpp/test-0.6.0/devtools/__init__.py | 1 + 
tags/jsoncpp/test-0.6.0/devtools/antglob.py | 201 ++ tags/jsoncpp/test-0.6.0/devtools/fixeol.py | 63 + .../test-0.6.0/devtools/licenseupdater.py | 93 + tags/jsoncpp/test-0.6.0/devtools/tarball.py | 53 + tags/jsoncpp/test-0.6.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/test-0.6.0/doc/footer.html | 23 + tags/jsoncpp/test-0.6.0/doc/header.html | 24 + tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox | 126 + tags/jsoncpp/test-0.6.0/doc/readme.txt | 1 + tags/jsoncpp/test-0.6.0/doc/roadmap.dox | 45 + tags/jsoncpp/test-0.6.0/doxybuild.py | 169 ++ .../test-0.6.0/include/json/autolink.h | 24 + tags/jsoncpp/test-0.6.0/include/json/config.h | 96 + .../test-0.6.0/include/json/features.h | 49 + .../test-0.6.0/include/json/forwards.h | 44 + tags/jsoncpp/test-0.6.0/include/json/json.h | 15 + tags/jsoncpp/test-0.6.0/include/json/reader.h | 212 ++ tags/jsoncpp/test-0.6.0/include/json/value.h | 1103 +++++++++ tags/jsoncpp/test-0.6.0/include/json/writer.h | 185 ++ .../test-0.6.0/makefiles/vs71/jsoncpp.sln | 46 + .../test-0.6.0/makefiles/vs71/jsontest.vcproj | 119 + .../test-0.6.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/test-0.6.0/makerelease.py | 380 +++ .../test-0.6.0/scons-tools/globtool.py | 53 + .../jsoncpp/test-0.6.0/scons-tools/srcdist.py | 179 ++ .../test-0.6.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/test-0.6.0/scons-tools/targz.py | 82 + .../test-0.6.0/src/jsontestrunner/main.cpp | 269 +++ .../test-0.6.0/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../test-0.6.0/src/lib_json/json_reader.cpp | 880 +++++++ .../test-0.6.0/src/lib_json/json_tool.h | 93 + .../test-0.6.0/src/lib_json/json_value.cpp | 1847 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../test-0.6.0/src/lib_json/json_writer.cpp | 838 +++++++ .../test-0.6.0/src/lib_json/sconscript | 8 + .../test-0.6.0/src/test_lib_json/jsontest.cpp | 608 +++++ .../test-0.6.0/src/test_lib_json/jsontest.h | 259 ++ .../test-0.6.0/src/test_lib_json/main.cpp | 271 +++ .../test-0.6.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/test-0.6.0/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../test-0.6.0/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../test-0.6.0/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../test-0.6.0/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + .../test-0.6.0/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../test-0.6.0/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../test-0.6.0/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../test-0.6.0/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../test-0.6.0/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../test-0.6.0/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../test-0.6.0/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../test-0.6.0/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../test-0.6.0/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../test-0.6.0/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + 
.../test-0.6.0/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../test-0.6.0/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../test-0.6.0/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../test-0.6.0/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../test-0.6.0/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../test-0.6.0/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../test-0.6.0/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../test-0.6.0/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../test-0.6.0/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../test-0.6.0/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../test-0.6.0/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../test-0.6.0/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../test-0.6.0/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../test-0.6.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../test/data/test_real_01.expected | 2 + .../test-0.6.0/test/data/test_real_01.json | 3 + .../test/data/test_real_02.expected | 2 + .../test-0.6.0/test/data/test_real_02.json | 3 + .../test/data/test_real_03.expected | 2 + .../test-0.6.0/test/data/test_real_03.json | 3 + .../test/data/test_real_04.expected | 2 + .../test-0.6.0/test/data/test_real_04.json | 3 + .../test/data/test_real_05.expected | 3 + .../test-0.6.0/test/data/test_real_05.json | 3 + .../test/data/test_real_06.expected | 3 + .../test-0.6.0/test/data/test_real_06.json | 3 + .../test/data/test_real_07.expected | 3 + .../test-0.6.0/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../test-0.6.0/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../test-0.6.0/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 + .../test-0.6.0/test/data/test_string_03.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../test-0.6.0/test/generate_expected.py | 11 + .../test-0.6.0/test/jsonchecker/fail1.json | 1 + .../test-0.6.0/test/jsonchecker/fail10.json | 1 + .../test-0.6.0/test/jsonchecker/fail11.json | 1 + .../test-0.6.0/test/jsonchecker/fail12.json | 1 + .../test-0.6.0/test/jsonchecker/fail13.json | 1 + .../test-0.6.0/test/jsonchecker/fail14.json | 1 + .../test-0.6.0/test/jsonchecker/fail15.json | 1 + 
.../test-0.6.0/test/jsonchecker/fail16.json | 1 + .../test-0.6.0/test/jsonchecker/fail17.json | 1 + .../test-0.6.0/test/jsonchecker/fail18.json | 1 + .../test-0.6.0/test/jsonchecker/fail19.json | 1 + .../test-0.6.0/test/jsonchecker/fail2.json | 1 + .../test-0.6.0/test/jsonchecker/fail20.json | 1 + .../test-0.6.0/test/jsonchecker/fail21.json | 1 + .../test-0.6.0/test/jsonchecker/fail22.json | 1 + .../test-0.6.0/test/jsonchecker/fail23.json | 1 + .../test-0.6.0/test/jsonchecker/fail24.json | 1 + .../test-0.6.0/test/jsonchecker/fail25.json | 1 + .../test-0.6.0/test/jsonchecker/fail26.json | 1 + .../test-0.6.0/test/jsonchecker/fail27.json | 2 + .../test-0.6.0/test/jsonchecker/fail28.json | 2 + .../test-0.6.0/test/jsonchecker/fail29.json | 1 + .../test-0.6.0/test/jsonchecker/fail3.json | 1 + .../test-0.6.0/test/jsonchecker/fail30.json | 1 + .../test-0.6.0/test/jsonchecker/fail31.json | 1 + .../test-0.6.0/test/jsonchecker/fail32.json | 1 + .../test-0.6.0/test/jsonchecker/fail33.json | 1 + .../test-0.6.0/test/jsonchecker/fail4.json | 1 + .../test-0.6.0/test/jsonchecker/fail5.json | 1 + .../test-0.6.0/test/jsonchecker/fail6.json | 1 + .../test-0.6.0/test/jsonchecker/fail7.json | 1 + .../test-0.6.0/test/jsonchecker/fail8.json | 1 + .../test-0.6.0/test/jsonchecker/fail9.json | 1 + .../test-0.6.0/test/jsonchecker/pass1.json | 58 + .../test-0.6.0/test/jsonchecker/pass2.json | 1 + .../test-0.6.0/test/jsonchecker/pass3.json | 6 + .../test-0.6.0/test/jsonchecker/readme.txt | 3 + .../test-0.6.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/test-0.6.0/test/runjsontests.py | 134 ++ tags/jsoncpp/test-0.6.0/test/rununittests.py | 73 + tags/jsoncpp/test-0.6.0/version | 1 + 185 files changed, 15363 insertions(+) create mode 100644 tags/jsoncpp/test-0.6.0/AUTHORS create mode 100644 tags/jsoncpp/test-0.6.0/LICENSE create mode 100644 tags/jsoncpp/test-0.6.0/NEWS.txt create mode 100644 tags/jsoncpp/test-0.6.0/README.txt create mode 100644 tags/jsoncpp/test-0.6.0/SConstruct create mode 100644 tags/jsoncpp/test-0.6.0/devtools/__init__.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/antglob.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/tarball.py create mode 100644 tags/jsoncpp/test-0.6.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/test-0.6.0/doc/footer.html create mode 100644 tags/jsoncpp/test-0.6.0/doc/header.html create mode 100644 tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/test-0.6.0/doc/readme.txt create mode 100644 tags/jsoncpp/test-0.6.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/test-0.6.0/doxybuild.py create mode 100644 tags/jsoncpp/test-0.6.0/include/json/autolink.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/config.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/features.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/forwards.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/json.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/reader.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/value.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/writer.h create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 
tags/jsoncpp/test-0.6.0/makerelease.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/test/cleantests.py create mode 100644 tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected create mode 
100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected create mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/generate_expected.py create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json create mode 100644 
tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/test-0.6.0/test/runjsontests.py create mode 100644 tags/jsoncpp/test-0.6.0/test/rununittests.py create mode 100644 tags/jsoncpp/test-0.6.0/version diff --git a/tags/jsoncpp/test-0.6.0/AUTHORS b/tags/jsoncpp/test-0.6.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/test-0.6.0/LICENSE b/tags/jsoncpp/test-0.6.0/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. + +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/test-0.6.0/NEWS.txt b/tags/jsoncpp/test-0.6.0/NEWS.txt new file mode 100644 index 0000000..7978c0a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/NEWS.txt @@ -0,0 +1,95 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installation. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to setup the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + - Added support for amalgated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgated source and header" + for detail. + +* Value + + - Removed experimental ValueAllocator, it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + + - Added support for 64 bits integer: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64 bits integers on system that support them (based on __int64 on + Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still returns plain + "int" based types, but asserts if an attempt is made to retrieve + a 64 bits value that can not represented as the return type. + + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64 bits integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns + the integer as a LargestInt/LargestUInt respectively. Those functions + functions are typically used when implementing writer. + + The reader attempts to read number as 64 bits integer, and fall back + to reading a double if the number is not in the range of 64 bits + integer. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now returns + long long. This changes break code that was passing the return value + to *printf() function. + + Support for 64 bits integer can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). 
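A minimal sketch of the 64-bit accessors described above (this example is not part of the release notes; it assumes a build where JSON_NO_INT64 is left undefined, and the key name and numbers are invented):

   #include <json/json.h>
   #include <iostream>

   int main()
   {
      Json::Value v;
      v["big"] = Json::Int64( 123456789012345LL );        // stored as a 64 bits integer
      Json::Int64 exact = v["big"].asInt64();             // exact 64 bits read
      Json::LargestInt widest = v["big"].asLargestInt();  // widest supported integer type
      // v["big"].asInt() would assert here, since the value cannot be
      // represented as a plain int (see the warning above).
      std::cout << exact << " " << widest << std::endl;
      return 0;
   }

As the notes above suggest, a writer would typically rely on asLargestInt()/asLargestUInt() so that the same code works whether or not 64 bits support is compiled in.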
+ + - Array index can be passed as int to operator[], allowing use of literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoid lost of precision warning caused by used of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added test to ensure that the escape sequence "\/" is corrected handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who + helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/test-0.6.0/README.txt b/tags/jsoncpp/test-0.6.0/README.txt new file mode 100644 index 0000000..ba70329 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/README.txt @@ -0,0 +1,172 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON value, handle serialization and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +Notes that test can be run by scons using the 'check' target (see above). + +You need to run test manually only if you are troubleshooting an issue. + +In the instruction below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. 
+ +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using the JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + +Note that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + +* Generating amalgated source and header + ====================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgated source can be generated at any time by running the following +command from the top directory (requires python 2.6): + +python amalgate.py + +It is possible to specify the header name. See the -h option for details. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that needs to be added to your project +- dist/json/json.h: header file to use in your project. It is +equivalent to including json/json.h in non-amalgated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header that provides forward declarations +of all JsonCpp types. This is typically what should be included in headers to +speed up compilation. + +The amalgated sources are generated by concatenating JsonCpp source in the +correct order and defining the macro JSON_IS_AMALGATED to prevent inclusion of +other headers. + +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path. jsoncpp headers +should be included as follows: + +#include <json/json.h> + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flattened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represents the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element paths. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated alongside the input test files.
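For illustration of the element path notation described in the previous section, a hypothetical TESTNAME pair might map onto Json::Value access roughly as follows (member names, values, and the exact rendering of the .expected lines are invented; the shipped test_complex_01 files remain the authoritative reference):

   #include <json/json.h>

   int main()
   {
      Json::Value root;                 // .={}
      root["count"] = 1234;             // .count=1234
      root["items"].append( "a" );      // .items=[]  then  .items[0]="a"
      root["items"].append( "b" );      //                  .items[1]="b"
      // Each .expected line is "path=value"; object and array values
      // themselves stay empty ({} or []), as described above.
      return 0;
   }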
+Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/test-0.6.0/SConstruct b/tags/jsoncpp/test-0.6.0/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) 
+if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. + if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/test-0.6.0/devtools/__init__.py b/tags/jsoncpp/test-0.6.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/devtools/antglob.py b/tags/jsoncpp/test-0.6.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/test-0.6.0/devtools/fixeol.py b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript 
**/wscript_build', +## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/test-0.6.0/devtools/tarball.py b/tags/jsoncpp/test-0.6.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/test-0.6.0/doc/doxyfile.in b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/test-0.6.0/doc/footer.html b/tags/jsoncpp/test-0.6.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/footer.html @@ -0,0 +1,23 @@ +
+
- + JsonCpp project page
+ + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/test-0.6.0/doc/header.html b/tags/jsoncpp/test-0.6.0/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox new file mode 100644 index 0000000..97cc108 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox @@ -0,0 +1,126 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of values, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space": true } +} +\endverbatim + +\section _features Features +- read and write JSON documents +- attach C and C++ style comments to elements during parsing +- rewrite JSON documents preserving the original comments + +Notes: Comments used to be supported in JSON but were removed for +portability (C-like comments are not supported in Python). Since +comments are useful in configuration/input files, this feature was +preserved. + +\section _example Code example + +\code +Json::Value root; // will contain the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and its location in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormattedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'plug-ins', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown, to make the new configuration document: +// Since Json::Value has implicit constructors for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from the +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To check out the source, see the following +instructions. + +\section _news What's New?
+ +The description of the latest changes can be found in +NEWS.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest NEWS.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML, a data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +See the file LICENSE in the top-directory of the project. + +Basically, JsonCpp is licensed under the MIT license, or public domain if desired +and recognized in your jurisdiction. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/test-0.6.0/doc/readme.txt b/tags/jsoncpp/test-0.6.0/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/test-0.6.0/doc/roadmap.dox b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox new file mode 100644 index 0000000..1ec0ab6 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox @@ -0,0 +1,45 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_64bits Add support for 64-bit integers + There have been many requests to add support for 64-bit integers. Use cases for this are: + - time is nowadays commonly represented with a 64-bit integer + - 64-bit integers are frequently used as primary key ids in many systems + + The plan to add support is: + - must be optional, behind a configuration option, since not all platforms provide 64-bit integer types. + - move the definition of Int and UInt from forwards.h to config.h, with the required platform magic. + - C++ defines no standard 64-bit integer type. Rely on the MSVC extension and the long long type, which + is widely supported. + \section ms_release Make JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Note: was OK on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add the JsonCpp version to the header as a numeric value for use in preprocessor tests + - Remove buggy experimental hash stuff + \section ms_strict Add a strict mode to the reader/parser + Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627); a usage sketch appears below. + - Enforce only object or array as root element + - Disable comment support + - Get the failing jsonchecker tests to pass in strict mode + \section ms_writer Writer control + Provide more control over how specific items are serialized when JSON allows a choice: + - Optionally allow escaping of non-ASCII characters using the Unicode escape sequence "\\u". + - Optionally allow escaping of "/" using "\/". + \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value. + Some typical use-cases involve converting an application-specific structure to/from a JSON document. + - Event-based parser to allow unserializing a JSON document directly into an application data structure instead of + using the intermediate Json::Value. + - "Stream"-based writer to serialize a JSON document without using Json::Value as input. + - Performance-oriented parser/writer: + - Provide an event-based parser. It should allow pulling & skipping events for ease of use. + - Provide a JSON document builder: fast only.
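The strict-mode milestone above maps onto the Json::Features class that this patch introduces in include/json/features.h. The following is a minimal, hypothetical sketch of how a caller could opt into it; the Reader and Features names follow the headers and the jsoncpp.dox example in this patch, while the helper function, document string, and error handling are illustrative only:

\code
#include <json/json.h>
#include <iostream>
#include <string>

// Parse a document with the strict configuration: comments are rejected
// and the root element must be an array or an object.
bool parseStrict( const std::string &document, Json::Value &root )
{
   Json::Features features = Json::Features::strictMode();
   Json::Reader reader( features );
   if ( !reader.parse( document, root, false ) )  // false: do not collect comments
   {
      std::cerr << reader.getFormattedErrorMessages();
      return false;
   }
   return true;
}
\endcode

A default-constructed Reader uses Json::Features::all() instead, which keeps the comment-friendly behaviour described in the features section of jsoncpp.dox.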
+ \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/test-0.6.0/doxybuild.py b/tags/jsoncpp/test-0.6.0/doxybuild.py new file mode 100644 index 0000000..03ad68d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doxybuild.py @@ -0,0 +1,169 @@ +"""Script to generate doxygen documentation. +""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/include/json/autolink.h b/tags/jsoncpp/test-0.6.0/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/config.h b/tags/jsoncpp/test-0.6.0/include/json/config.h new file mode 100644 index 0000000..24991d5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGATED + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/features.h b/tags/jsoncpp/test-0.6.0/include/json/features.h new file mode 100644 index 0000000..0b53db1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/forwards.h b/tags/jsoncpp/test-0.6.0/include/json/forwards.h new file mode 100644 index 0000000..083d44f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/json.h b/tags/jsoncpp/test-0.6.0/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/reader.h b/tags/jsoncpp/test-0.6.0/include/json/reader.h new file mode 100644 index 0000000..13de15c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/reader.h @@ -0,0 +1,212 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
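A minimal parsing sketch using the declarations above (illustrative only; assumes the headers are installed so that <json/json.h> resolves, and uses Features::strictMode() from features.h earlier in this patch):

#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   // Strict parsing: comments are forbidden and the root must be an array or object.
   Json::Reader reader( Json::Features::strictMode() );
   Json::Value root;
   const std::string document = "{ \"name\": \"jsoncpp\", \"version\": 0.6 }";

   if ( !reader.parse( document, root, false ) )   // false: discard comments
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root["name"].asString() << std::endl;
   return 0;
}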
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/value.h b/tags/jsoncpp/test-0.6.0/include/json/value.h new file mode 100644 index 0000000..66821ab --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
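A short sketch of the behaviour described above: automatic member creation through operator[](), defaults through get(), and member enumeration through getMemberNames() (illustrative only; assumes <json/json.h> is on the include path):

#include <iostream>
#include <string>
#include <json/json.h>

int main()
{
   Json::Value root( Json::objectValue );

   // operator[] on a non-const Value creates missing members as nullValue.
   root["encoding"] = "UTF-8";
   root["indent"]["length"] = 3;          // nested object created on the fly
   root["files"].append( "core.json" );   // nullValue silently becomes an arrayValue
   root["files"].append( "extra.json" );

   // get() returns the supplied default when the member does not exist.
   std::string plugin = root.get( "plug-ins", "none" ).asString();

   // Enumerate the object members by name.
   Json::Value::Members members = root.getMemberNames();
   for ( Json::Value::Members::iterator it = members.begin(); it != members.end(); ++it )
      std::cout << *it << std::endl;

   std::cout << "plug-ins: " << plugin << std::endl;
   return 0;
}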
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
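A small sketch of the array behaviour documented here: resize(), the 0u trick that selects the ArrayIndex overload, implicit growth on out-of-range writes, and get() with a default (illustrative only):

#include <cassert>
#include <json/json.h>

int main()
{
   Json::Value items( Json::arrayValue );
   items.resize( 3 );        // three nullValue elements
   items[0u] = 1;            // 0u picks the ArrayIndex overload, not the const char* one
   items[1u] = "two";
   items[2u] = 3.0;

   assert( items.size() == 3 );
   assert( items.isValidIndex( 2 ) );

   // Writing past the end grows the array; the gap is filled with nullValue.
   items[5u] = true;
   assert( items.size() == 6 );

   // An out-of-range read through get() falls back to the supplied default.
   double missing = items.get( 10u, -1.0 ).asDouble();
   (void)missing;
   return 0;
}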
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
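A cautious sketch of the Path helpers declared above; the header itself flags them as experimental and untested, so treat this only as an illustration of the intended syntax:

#include <iostream>
#include <json/json.h>

int main()
{
   Json::Value root;
   root["settings"]["indent"]["length"] = 3;

   // resolve() with a default: returns the node if present, the default otherwise.
   Json::Path lengthPath( ".settings.indent.length" );
   std::cout << lengthPath.resolve( root, 4 ).asInt() << std::endl;   // prints 3

   // make() creates the missing nodes along the path and returns a reference.
   Json::Path depthPath( ".settings.depth" );
   depthPath.make( root ) = 2;
   std::cout << root["settings"]["depth"].asInt() << std::endl;       // prints 2
   return 0;
}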
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
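A worked instance of the page look-up rule quoted above, with itemsPerPage = 8 as declared here (a sketch of the arithmetic only, not of the real internal accessor):

#include <json/json.h>

// Mirrors the rule "pages_[pageIndex][itemIndex % itemsPerPage]".
inline Json::Value &pageLookup( Json::Value **pages, unsigned int itemIndex )
{
   const unsigned int itemsPerPage = 8;   // matches ValueInternalArray::itemsPerPage
   // e.g. itemIndex 19 -> pageIndex 19 / 8 = 2, slot 19 % 8 = 3 -> pages[2][3]
   return pages[ itemIndex / itemsPerPage ][ itemIndex % itemsPerPage ];
}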
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/writer.h b/tags/jsoncpp/test-0.6.0/include/json/writer.h new file mode 100644 index 0000000..cb0bd9b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
+ * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
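A usage sketch for the writers declared in this header (illustrative only; assumes <json/json.h> is on the include path):

#include <iostream>
#include <json/json.h>

int main()
{
   Json::Value root;
   root["name"] = "jsoncpp";
   root["tags"].append( "json" );
   root["tags"].append( "c++" );

   Json::FastWriter fast;                       // single line, machine oriented
   std::cout << fast.write( root );

   Json::StyledWriter styled;                   // indented, human friendly
   std::cout << styled.write( root );

   Json::StyledStreamWriter stream( "  " );     // two-space indentation, writes to a stream
   stream.write( std::cout, root );

   std::cout << root;                           // operator<< goes through StyledStreamWriter
   return 0;
}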
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makerelease.py b/tags/jsoncpp/test-0.6.0/makerelease.py new file mode 100644 index 0000000..a6e330e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makerelease.py @@ -0,0 +1,380 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
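    The synchronisation below proceeds in three steps: list the remote htdocs
    directory, remove the remote files that are no longer present in doc_dir,
    then upload the local files in batches of 10 with up to 3 retries per
    SFTP batch.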
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir + print 'Generating amalgated source tarball to', amalgated_tarball_path + amalgated_dir = 'dist/amalgated' + amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) + amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version + tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], + amalgated_dir, prefix_dir=amalgated_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + 
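    # For reference, the dist/ tree assembled by the steps above looks roughly like:
    #   dist/export/                                         svn export of the release tag
    #   dist/jsoncpp-src-<release_version>.tar.gz            source tarball
    #   dist/jsoncpp-src-<release_version>-amalgated.tar.gz  amalgamated-source tarball
    #   dist/doccheck/<doc_dirname>/                         extracted doxygen documentation
    #   dist/distcheck/jsoncpp-src-<release_version>/        sources + scons-local used by the build check below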
all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
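        A minimal illustrative use from a SConscript (hypothetical file names and
        substitution values):

            env.Tool('substinfile', toolpath=['scons-tools'])
            env.SubstInFile('version.h', 'version.h.in',
                            SUBST_DICT={'%VERSION%': '0.6.0'})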
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/targz.py b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
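        # Illustrative use from a SConstruct (hypothetical target and sources):
        #   env = Environment(tools=['default', 'targz'], toolpath=['scons-tools'])
        #   env.TarGz('dist/jsoncpp.tar.gz', [env.Dir('include'), env.Dir('src')],
        #             TARGZ_BASEDIR=env.Dir('.'))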
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
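      // Illustrative note: released objects are recycled as free-list nodes, the
      // first bytes of each dead object holding the pointer to the next one, so
      // after release(a) followed by release(b) the list is
      //   freeHead_ -> b -> a -> 0
      // and serving an allocation from it is a single pointer read below.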
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
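   // Note on the growth policy above: newIndexCount = (indexCount*3)/2 + 1, so an
   // index repeatedly enlarged from empty gets capacities 1, 2, 4, 7, 11, 17, ...
   // i.e. roughly 1.5x geometric growth, which keeps repeated appends cheap.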
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
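         // Illustrative note: each bucket is a singly linked chain of
         // ValueInternalLink nodes, each holding itemPerLink key/value slots
         // filled left to right; the first available slot both terminates this
         // probe and is where setNewItem() stores a new member.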
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..7c594e2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp @@ -0,0 +1,880 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
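+   // std::getline with (char)EOF as the delimiter pulls the remaining stream
+   // into 'doc' in one call, so the string-based parse() overload can be
+   // reused. (On platforms where char(EOF) is 0xFF, a stray 0xFF byte in the
+   // input would end the read early.)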
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); 
+ } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
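+         // Worked example, assuming a 64-bit LargestUInt and a non-negative
+         // literal: maxIntegerValue is 18446744073709551615, so threshold is
+         // 1844674407370955161 and lastDigitThreshold is 5; once 'value' has
+         // reached the threshold, the integer path is kept only when this is
+         // the final digit and it does not exceed lastDigitThreshold.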
+ if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; + } + if ( isNegative ) + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize+1]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && 
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... + * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specify the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( LargestUInt value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..c810417 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp @@ -0,0 +1,1847 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + + +/// Unknown size marker +enum { unknown = (unsigned)-1 }; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + +} // namespace Json + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGATED) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex 
newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
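
// Usage sketch (not part of the patched sources; every name below is
// illustrative): how the Json::Value member access implemented above behaves.
// A null value silently turns into an object or array on the first non-const
// subscript, as resolveReference() and operator[] show.
Json::Value config;                                   // starts as nullValue
config["host"] = "localhost";                         // config becomes objectValue
config["ports"].append( 8080 );                       // "ports" becomes arrayValue
config["ports"].append( 8081 );

bool hasHost    = config.isMember( "host" );                      // true
Json::Value t   = config.get( "timeout", Json::Value( 30 ) );     // default returned
Json::Value old = config.removeMember( "host" );                  // returns "localhost"

// Json::Path (whose parser, makePath(), appears just above) resolves dotted
// and bracketed expressions against a document; make() creates the
// intermediate objects on demand.
Json::Path widthPath( ".window.size.width" );
Json::Value width = widthPath.resolve( config, Json::Value( 640 ) );  // 640: not present yet
widthPath.make( config ) = 800;                                       // creates .window.size.width
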
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( 
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..8c4c180 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
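
// Usage sketch (not part of the patched sources; names are illustrative, and
// the usual <json/json.h> and <iostream> includes are assumed): the iterators
// defined in json_valueiterator.inl above. In the default map-based build,
// object members are visited in sorted key order; key()/memberName() expose
// the member name and index() the array index.
Json::Value doc;
doc["alpha"] = 1;
doc["beta"]  = 2;
for ( Json::Value::const_iterator it = doc.begin(); it != doc.end(); ++it )
   std::cout << it.memberName() << " = " << (*it).asInt() << "\n";

Json::Value list;
list.append( "x" );
list.append( "y" );
for ( Json::Value::iterator it = list.begin(); it != list.end(); ++it )
   std::cout << it.index() << ": " << (*it).asString() << "\n";
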
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+            // blep notes: actually escaping \/ may be useful in javascript to avoid </
+            // sequence.
+            // Should add a flag to allow this compatibility mode and prevent this
+            // sequence from occurring.
+         default:
+            if ( isControlCharacter( *c ) )
+            {
+               std::ostringstream oss;
+               oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c);
+               result += oss.str();
+            }
+            else
+            {
+               result += *c;
+            }
+            break;
+      }
+   }
+   result += "\"";
+   return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer()
+{
+}
+
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+   : yamlCompatiblityEnabled_( false )
+{
+}
+
+
+void
+FastWriter::enableYAMLCompatibility()
+{
+   yamlCompatiblityEnabled_ = true;
+}
+
+
+std::string
+FastWriter::write( const Value &root )
+{
+   document_ = "";
+   writeValue( root );
+   document_ += "\n";
+   return document_;
+}
+
+
+void
+FastWriter::writeValue( const Value &value )
+{
+   switch ( value.type() )
+   {
+   case nullValue:
+      document_ += "null";
+      break;
+   case intValue:
+      document_ += valueToString( value.asLargestInt() );
+      break;
+   case uintValue:
+      document_ += valueToString( value.asLargestUInt() );
+      break;
+   case realValue:
+      document_ += valueToString( value.asDouble() );
+      break;
+   case stringValue:
+      document_ += valueToQuotedString( value.asCString() );
+      break;
+   case booleanValue:
+      document_ += valueToString( value.asBool() );
+      break;
+   case arrayValue:
+      {
+         document_ += "[";
+         int size = value.size();
+         for ( int index =0; index < size; ++index )
+         {
+            if ( index > 0 )
+               document_ += ",";
+            writeValue( value[index] );
+         }
+         document_ += "]";
+      }
+      break;
+   case objectValue:
+      {
+         Value::Members members( value.getMemberNames() );
+         document_ += "{";
+         for ( Value::Members::iterator it = members.begin();
+               it != members.end();
+               ++it )
+         {
+            const std::string &name = *it;
+            if ( it != members.begin() )
+               document_ += ",";
+            document_ += valueToQuotedString( name.c_str() );
+            document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + 
lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
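
// Usage sketch (not part of the patched sources; values are illustrative): the
// three writers implemented in json_writer.cpp above. FastWriter emits one
// compact line, StyledWriter returns an indented std::string (3-space indent,
// comments preserved), and StyledStreamWriter writes to an std::ostream with a
// caller-chosen indentation string; operator<<(std::ostream&, const Value&)
// uses the latter with its default indentation.
Json::Value root;
root["name"]    = "jsoncpp";
root["release"] = true;
root["tags"].append( "json" );

Json::FastWriter fast;
std::string compact = fast.write( root );        // e.g. {"name":"jsoncpp",...}\n

Json::StyledWriter styled;
std::string pretty = styled.write( root );       // multi-line, human readable

Json::StyledStreamWriter streamWriter( "  " );   // two-space indent
streamWriter.write( std::cout, root );
std::cout << root;                               // equivalent stream form
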
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
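
// Sketch (not part of the patched sources; MyFixture and checkInvariant are
// hypothetical): how the predicate stack consumed by addFailure() above gets
// populated. JSONTEST_ASSERT_PRED (defined later in jsontest.h) pushes a
// PredicateContext around a helper call, so an assertion failing inside the
// helper is reported by printFailure() nested under the call site.
JSONTEST_FIXTURE( MyFixture, invariantHolds )
{
   JSONTEST_ASSERT_PRED( checkInvariant( value_ ) );   // outer predicate context
}

void MyFixture::checkInvariant( const Json::Value &value )
{
   JSONTEST_ASSERT( value.isObject() ) << "got: " << value.toStyledString();   // inner failure
}
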
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..0d07238 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h @@ -0,0 +1,259 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). 
+ PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. + TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. 
+ int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. 
+#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..de64200 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp @@ -0,0 +1,271 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , float_( 0.00390625f ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. 
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/test/cleantests.py b/tags/jsoncpp/test-0.6.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 
+.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected new file mode 100644 index 
0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json new file mode 100644 index 
0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json 
b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 
+.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 
+.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 
+.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 
+.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 
+.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 
+.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 
+.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 
+.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 
+.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 
+.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json 
b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- 
/dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/generate_expected.py b/tags/jsoncpp/test-0.6.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json 
b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ 
No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": 
"\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/test-0.6.0/test/runjsontests.py b/tags/jsoncpp/test-0.6.0/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + 
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/test/rununittests.py b/tags/jsoncpp/test-0.6.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' 
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/version b/tags/jsoncpp/test-0.6.0/version new file mode 100644 index 0000000..718dcfc --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/version @@ -0,0 +1 @@ +test-0.6.0 \ No newline at end of file From c143bbb095b257d63c6d2da13f22e1d6b4674c1e Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 20:39:25 +0000 Subject: [PATCH 168/268] Removing tag due to failed testing git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@168 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/test-0.6.0/AUTHORS | 1 - tags/jsoncpp/test-0.6.0/LICENSE | 55 - tags/jsoncpp/test-0.6.0/NEWS.txt | 95 - tags/jsoncpp/test-0.6.0/README.txt | 172 -- tags/jsoncpp/test-0.6.0/SConstruct | 248 -- tags/jsoncpp/test-0.6.0/devtools/__init__.py | 1 - tags/jsoncpp/test-0.6.0/devtools/antglob.py | 201 -- tags/jsoncpp/test-0.6.0/devtools/fixeol.py | 63 - .../test-0.6.0/devtools/licenseupdater.py | 93 - tags/jsoncpp/test-0.6.0/devtools/tarball.py | 53 - tags/jsoncpp/test-0.6.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/test-0.6.0/doc/footer.html | 23 - tags/jsoncpp/test-0.6.0/doc/header.html | 24 - tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox | 126 - tags/jsoncpp/test-0.6.0/doc/readme.txt | 1 - tags/jsoncpp/test-0.6.0/doc/roadmap.dox | 45 - tags/jsoncpp/test-0.6.0/doxybuild.py | 169 -- .../test-0.6.0/include/json/autolink.h | 24 - tags/jsoncpp/test-0.6.0/include/json/config.h | 96 - .../test-0.6.0/include/json/features.h | 49 - .../test-0.6.0/include/json/forwards.h | 44 - tags/jsoncpp/test-0.6.0/include/json/json.h | 15 - tags/jsoncpp/test-0.6.0/include/json/reader.h | 212 -- tags/jsoncpp/test-0.6.0/include/json/value.h | 1103 --------- tags/jsoncpp/test-0.6.0/include/json/writer.h | 185 -- .../test-0.6.0/makefiles/vs71/jsoncpp.sln | 46 - .../test-0.6.0/makefiles/vs71/jsontest.vcproj | 119 - .../test-0.6.0/makefiles/vs71/lib_json.vcproj | 214 -- .../makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/test-0.6.0/makerelease.py | 380 --- .../test-0.6.0/scons-tools/globtool.py | 53 - .../jsoncpp/test-0.6.0/scons-tools/srcdist.py | 179 -- .../test-0.6.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/test-0.6.0/scons-tools/targz.py | 82 - .../test-0.6.0/src/jsontestrunner/main.cpp | 269 --- .../test-0.6.0/src/jsontestrunner/sconscript | 9 - .../src/lib_json/json_batchallocator.h | 130 - .../src/lib_json/json_internalarray.inl | 456 ---- .../src/lib_json/json_internalmap.inl | 615 ----- .../test-0.6.0/src/lib_json/json_reader.cpp | 880 ------- .../test-0.6.0/src/lib_json/json_tool.h | 93 - .../test-0.6.0/src/lib_json/json_value.cpp | 1847 -------------- .../src/lib_json/json_valueiterator.inl | 299 --- .../test-0.6.0/src/lib_json/json_writer.cpp | 838 ------- .../test-0.6.0/src/lib_json/sconscript | 8 - .../test-0.6.0/src/test_lib_json/jsontest.cpp | 608 ----- .../test-0.6.0/src/test_lib_json/jsontest.h | 259 -- .../test-0.6.0/src/test_lib_json/main.cpp | 271 --- .../test-0.6.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/test-0.6.0/test/cleantests.py | 10 - .../test/data/fail_test_array_01.json | 1 - .../test/data/test_array_01.expected | 1 - .../test-0.6.0/test/data/test_array_01.json | 1 - .../test/data/test_array_02.expected | 2 - .../test-0.6.0/test/data/test_array_02.json | 1 - .../test/data/test_array_03.expected | 6 - .../test-0.6.0/test/data/test_array_03.json | 1 - .../test/data/test_array_04.expected | 5 - 
.../test-0.6.0/test/data/test_array_04.json | 1 - .../test/data/test_array_05.expected | 100 - .../test-0.6.0/test/data/test_array_05.json | 1 - .../test/data/test_array_06.expected | 5 - .../test-0.6.0/test/data/test_array_06.json | 4 - .../test/data/test_basic_01.expected | 1 - .../test-0.6.0/test/data/test_basic_01.json | 1 - .../test/data/test_basic_02.expected | 1 - .../test-0.6.0/test/data/test_basic_02.json | 1 - .../test/data/test_basic_03.expected | 3 - .../test-0.6.0/test/data/test_basic_03.json | 3 - .../test/data/test_basic_04.expected | 2 - .../test-0.6.0/test/data/test_basic_04.json | 2 - .../test/data/test_basic_05.expected | 2 - .../test-0.6.0/test/data/test_basic_05.json | 2 - .../test/data/test_basic_06.expected | 2 - .../test-0.6.0/test/data/test_basic_06.json | 2 - .../test/data/test_basic_07.expected | 2 - .../test-0.6.0/test/data/test_basic_07.json | 2 - .../test/data/test_basic_08.expected | 2 - .../test-0.6.0/test/data/test_basic_08.json | 3 - .../test/data/test_basic_09.expected | 2 - .../test-0.6.0/test/data/test_basic_09.json | 4 - .../test/data/test_comment_01.expected | 8 - .../test-0.6.0/test/data/test_comment_01.json | 8 - .../test/data/test_complex_01.expected | 20 - .../test-0.6.0/test/data/test_complex_01.json | 17 - .../test/data/test_integer_01.expected | 1 - .../test-0.6.0/test/data/test_integer_01.json | 2 - .../test/data/test_integer_02.expected | 1 - .../test-0.6.0/test/data/test_integer_02.json | 2 - .../test/data/test_integer_03.expected | 1 - .../test-0.6.0/test/data/test_integer_03.json | 2 - .../test/data/test_integer_04.expected | 2 - .../test-0.6.0/test/data/test_integer_04.json | 3 - .../test/data/test_integer_05.expected | 2 - .../test-0.6.0/test/data/test_integer_05.json | 2 - .../test/data/test_integer_06_64bits.expected | 1 - .../test/data/test_integer_06_64bits.json | 2 - .../test/data/test_integer_07_64bits.expected | 1 - .../test/data/test_integer_07_64bits.json | 2 - .../test/data/test_integer_08_64bits.expected | 1 - .../test/data/test_integer_08_64bits.json | 2 - .../test/data/test_large_01.expected | 2122 ----------------- .../test-0.6.0/test/data/test_large_01.json | 2 - .../test/data/test_object_01.expected | 1 - .../test-0.6.0/test/data/test_object_01.json | 1 - .../test/data/test_object_02.expected | 2 - .../test-0.6.0/test/data/test_object_02.json | 1 - .../test/data/test_object_03.expected | 4 - .../test-0.6.0/test/data/test_object_03.json | 5 - .../test/data/test_object_04.expected | 2 - .../test-0.6.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../test/data/test_real_01.expected | 2 - .../test-0.6.0/test/data/test_real_01.json | 3 - .../test/data/test_real_02.expected | 2 - .../test-0.6.0/test/data/test_real_02.json | 3 - .../test/data/test_real_03.expected | 2 - .../test-0.6.0/test/data/test_real_03.json | 3 - .../test/data/test_real_04.expected | 2 - .../test-0.6.0/test/data/test_real_04.json | 3 - .../test/data/test_real_05.expected | 3 - .../test-0.6.0/test/data/test_real_05.json | 3 - .../test/data/test_real_06.expected | 3 - .../test-0.6.0/test/data/test_real_06.json | 3 - .../test/data/test_real_07.expected | 3 - .../test-0.6.0/test/data/test_real_07.json | 3 - .../test/data/test_string_01.expected | 1 - .../test-0.6.0/test/data/test_string_01.json | 1 - .../test/data/test_string_02.expected | 1 - .../test-0.6.0/test/data/test_string_02.json | 1 - .../test/data/test_string_03.expected | 1 - 
.../test-0.6.0/test/data/test_string_03.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - .../test-0.6.0/test/generate_expected.py | 11 - .../test-0.6.0/test/jsonchecker/fail1.json | 1 - .../test-0.6.0/test/jsonchecker/fail10.json | 1 - .../test-0.6.0/test/jsonchecker/fail11.json | 1 - .../test-0.6.0/test/jsonchecker/fail12.json | 1 - .../test-0.6.0/test/jsonchecker/fail13.json | 1 - .../test-0.6.0/test/jsonchecker/fail14.json | 1 - .../test-0.6.0/test/jsonchecker/fail15.json | 1 - .../test-0.6.0/test/jsonchecker/fail16.json | 1 - .../test-0.6.0/test/jsonchecker/fail17.json | 1 - .../test-0.6.0/test/jsonchecker/fail18.json | 1 - .../test-0.6.0/test/jsonchecker/fail19.json | 1 - .../test-0.6.0/test/jsonchecker/fail2.json | 1 - .../test-0.6.0/test/jsonchecker/fail20.json | 1 - .../test-0.6.0/test/jsonchecker/fail21.json | 1 - .../test-0.6.0/test/jsonchecker/fail22.json | 1 - .../test-0.6.0/test/jsonchecker/fail23.json | 1 - .../test-0.6.0/test/jsonchecker/fail24.json | 1 - .../test-0.6.0/test/jsonchecker/fail25.json | 1 - .../test-0.6.0/test/jsonchecker/fail26.json | 1 - .../test-0.6.0/test/jsonchecker/fail27.json | 2 - .../test-0.6.0/test/jsonchecker/fail28.json | 2 - .../test-0.6.0/test/jsonchecker/fail29.json | 1 - .../test-0.6.0/test/jsonchecker/fail3.json | 1 - .../test-0.6.0/test/jsonchecker/fail30.json | 1 - .../test-0.6.0/test/jsonchecker/fail31.json | 1 - .../test-0.6.0/test/jsonchecker/fail32.json | 1 - .../test-0.6.0/test/jsonchecker/fail33.json | 1 - .../test-0.6.0/test/jsonchecker/fail4.json | 1 - .../test-0.6.0/test/jsonchecker/fail5.json | 1 - .../test-0.6.0/test/jsonchecker/fail6.json | 1 - .../test-0.6.0/test/jsonchecker/fail7.json | 1 - .../test-0.6.0/test/jsonchecker/fail8.json | 1 - .../test-0.6.0/test/jsonchecker/fail9.json | 1 - .../test-0.6.0/test/jsonchecker/pass1.json | 58 - .../test-0.6.0/test/jsonchecker/pass2.json | 1 - .../test-0.6.0/test/jsonchecker/pass3.json | 6 - .../test-0.6.0/test/jsonchecker/readme.txt | 3 - .../test-0.6.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/test-0.6.0/test/runjsontests.py | 134 -- tags/jsoncpp/test-0.6.0/test/rununittests.py | 73 - tags/jsoncpp/test-0.6.0/version | 1 - 185 files changed, 15363 deletions(-) delete mode 100644 tags/jsoncpp/test-0.6.0/AUTHORS delete mode 100644 tags/jsoncpp/test-0.6.0/LICENSE delete mode 100644 tags/jsoncpp/test-0.6.0/NEWS.txt delete mode 100644 tags/jsoncpp/test-0.6.0/README.txt delete mode 100644 tags/jsoncpp/test-0.6.0/SConstruct delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/test-0.6.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/test-0.6.0/doc/footer.html delete mode 100644 tags/jsoncpp/test-0.6.0/doc/header.html delete mode 100644 tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/test-0.6.0/doc/readme.txt delete 
mode 100644 tags/jsoncpp/test-0.6.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/test-0.6.0/doxybuild.py delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/config.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/features.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/json.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/reader.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/value.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/writer.h delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/test-0.6.0/makerelease.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/test-0.6.0/test/cleantests.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json delete mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.json delete mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json delete mode 100644 
tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/rununittests.py delete mode 100644 tags/jsoncpp/test-0.6.0/version diff --git a/tags/jsoncpp/test-0.6.0/AUTHORS b/tags/jsoncpp/test-0.6.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/test-0.6.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/test-0.6.0/LICENSE b/tags/jsoncpp/test-0.6.0/LICENSE deleted file mode 100644 index ca2bfe1..0000000 --- a/tags/jsoncpp/test-0.6.0/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -The JsonCpp library's source code, including accompanying documentation, -tests and demonstration applications, are licensed under the following -conditions... - -The author (Baptiste Lepilleur) explicitly disclaims copyright in all -jurisdictions which recognize such a disclaimer. In such jurisdictions, -this software is released into the Public Domain. - -In jurisdictions which do not recognize Public Domain property (e.g. Germany as of -2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is -released under the terms of the MIT License (see below). - -In jurisdictions which recognize Public Domain property, the user of this -software may choose to accept it either as 1) Public Domain, 2) under the -conditions of the MIT License (see below), or 3) under the terms of dual -Public Domain/MIT License conditions described here, as they choose. 
- -The MIT License is about as close to Public Domain as a license can get, and is -described in clear, concise terms at: - - http://en.wikipedia.org/wiki/MIT_License - -The full text of the MIT License follows: - -======================================================================== -Copyright (c) 2007-2010 Baptiste Lepilleur - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -======================================================================== -(END LICENSE TEXT) - -The MIT license is compatible with both the GPL and commercial -software, affording one all of the rights of Public Domain with the -minor nuisance of being required to keep the above copyright notice -and license text in the source code. Note also that by accepting the -Public Domain "license" you can re-license your copy using whatever -license you like. diff --git a/tags/jsoncpp/test-0.6.0/NEWS.txt b/tags/jsoncpp/test-0.6.0/NEWS.txt deleted file mode 100644 index 7978c0a..0000000 --- a/tags/jsoncpp/test-0.6.0/NEWS.txt +++ /dev/null @@ -1,95 +0,0 @@ - New in JsonCpp 0.6.0: - --------------------- - -* Compilation - - - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now - propagated to the build environment as this is required for some - compiler installation. - - - Added support for Microsoft Visual Studio 2008 (bug #2930462): - The platform "msvc90" has been added. - - Notes: you need to setup the environment by running vcvars32.bat - (e.g. MSVC 2008 command prompt in start menu) before running scons. - - - Added support for amalgated source and header generation (a la sqlite). - Refer to README.txt section "Generating amalgated source and header" - for detail. - -* Value - - - Removed experimental ValueAllocator, it caused static - initialization/destruction order issues (bug #2934500). - The DefaultValueAllocator has been inlined in code. - - - Added support for 64 bits integer: - - Types Json::Int64 and Json::UInt64 have been added. They are aliased - to 64 bits integers on system that support them (based on __int64 on - Microsoft Visual Studio platform, and long long on other platforms). - - Types Json::LargestInt and Json::LargestUInt have been added. They are - aliased to the largest integer type supported: - either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. - - Json::Value::asInt() and Json::Value::asUInt() still returns plain - "int" based types, but asserts if an attempt is made to retrieve - a 64 bits value that can not represented as the return type. 
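To make the 64-bit integer notes above concrete, the short C++ sketch below parses a value that does not fit in 32 bits and reads it back through the 64-bit accessors introduced in this release (asInt64() and asLargestInt(), described in the items that follow). This is only an illustrative sketch: the sample document, the field name and the main() harness are assumptions, not text from the release notes.

    // Illustrative sketch of the 0.6.0 64-bit integer API (see the Value notes around this point).
    // The sample document and the main() harness are assumptions used for illustration only.
    #include <json/json.h>
    #include <iostream>

    int main() {
        Json::Reader reader;
        Json::Value root;
        // 4294967296 (2^32) cannot be represented as a 32-bit int, so asInt() would assert here.
        if ( !reader.parse( "{ \"big\" : 4294967296 }", root ) )
            return 1;
        Json::Int64 big = root["big"].asInt64();               // 64-bit accessor
        Json::LargestInt widest = root["big"].asLargestInt();  // largest supported integer type
        std::cout << big << " " << widest << std::endl;
        return 0;
    }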
- - Json::Value::asInt64() and Json::Value::asUInt64() have been added - to obtain the 64 bits integer value. - - Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns - the integer as a LargestInt/LargestUInt respectively. Those functions - functions are typically used when implementing writer. - - The reader attempts to read number as 64 bits integer, and fall back - to reading a double if the number is not in the range of 64 bits - integer. - - Warning: Json::Value::asInt() and Json::Value::asUInt() now returns - long long. This changes break code that was passing the return value - to *printf() function. - - Support for 64 bits integer can be disabled by defining the macro - JSON_NO_INT64 (uncomment it in json/config.h for example), though - it should have no impact on existing usage. - - - The type Json::ArrayIndex is used for indexes of a JSON value array. It - is an unsigned int (typically 32 bits). - - - Array index can be passed as int to operator[], allowing use of literal: - Json::Value array; - array.append( 1234 ); - int value = array[0].asInt(); // did not compile previously - - - Added float Json::Value::asFloat() to obtain a floating point value as a - float (avoid lost of precision warning caused by used of asDouble() - to initialize a float). - -* Reader - - - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. - Bug #3023708 (Formatted has 2 't'). The old member function is deprecated - but still present for backward compatibility. - -* Tests - - - Added test to ensure that the escape sequence "\/" is corrected handled - by the parser. - -* Bug fixes - - - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now - correctly detected. - - - Bug #3139678: stack buffer overflow when parsing a double with a - length of 32 characters. - -* License - - - See file LICENSE for details. Basically JsonCpp is now licensed under - MIT license, or public domain if desired and recognized in your jurisdiction. - Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who - helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/test-0.6.0/README.txt b/tags/jsoncpp/test-0.6.0/README.txt deleted file mode 100644 index ba70329..0000000 --- a/tags/jsoncpp/test-0.6.0/README.txt +++ /dev/null @@ -1,172 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate -JSON value, handle serialization and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. 
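As a companion to the introduction above (JsonCpp as an API to manipulate JSON values and handle serialization/unserialization to string), here is a minimal round-trip sketch using the Reader, Value and StyledWriter classes shipped in this tag. The sample document, field names and error handling are illustrative assumptions rather than text taken from the README itself.

    // Hypothetical usage sketch: unserialize from a string, read values, serialize back.
    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main() {
        const std::string doc = "{ \"name\" : \"jsoncpp\", \"tests\" : 3 }";  // sample input (assumption)
        Json::Value root;
        Json::Reader reader;
        if ( !reader.parse( doc, root ) ) {                  // unserialization with error report
            std::cerr << reader.getFormattedErrorMessages();
            return 1;
        }
        std::cout << root["name"].asString() << " defines "
                  << root["tests"].asInt() << " tests\n";
        Json::StyledWriter writer;
        std::cout << writer.write( root );                   // serialization back to a string
        return 0;
    }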
- -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - msvc90 Microsoft Visual Studio 2008 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -Notes: if you are building with Microsoft Visual Studio 2008, you need to -setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) -before running scons. - -Adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -Notes that test can be run by scons using the 'check' target (see above). - -You need to run test manually only if you are troubleshooting an issue. - -In the instruction below, replace "path to jsontest.exe" with the path -of the 'jsontest' executable that was compiled on your platform. - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - -Notes that the documentation is also available for download as a tarball. -The documentation of the latest release is available online at: -http://jsoncpp.sourceforge.net/ - -* Generating amalgated source and header - ====================================== - -JsonCpp is provided with a script to generate a single header and a single -source file to ease inclusion in an existing project. - -The amalgated source can be generated at any time by running the following -command from the top-directory (requires python 2.6): - -python amalgate.py - -It is possible to specify header name. See -h options for detail. By default, -the following files are generated: -- dist/jsoncpp.cpp: source file that need to be added to your project -- dist/json/json.h: header file corresponding to use in your project. It is -equivalent to including json/json.h in non-amalgated source. This header -only depends on standard headers. -- dist/json/json-forwards.h: header the provides forward declaration -of all JsonCpp types. This typically what should be included in headers to -speed-up compilation. - -The amalgated sources are generated by concatenating JsonCpp source in the -correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of -other headers. - -* Using json-cpp in your project: - =============================== - -include/ should be added to your compiler include path. 
jsoncpp headers -should be included as follow: - -#include - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. - -* License - ======= - -See file LICENSE for details. Basically JsonCpp is licensed under -MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/test-0.6.0/SConstruct b/tags/jsoncpp/test-0.6.0/SConstruct deleted file mode 100644 index 23225cb..0000000 --- a/tags/jsoncpp/test-0.6.0/SConstruct +++ /dev/null @@ -1,248 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. 
- import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - # LD_LIBRARY_PATH & co is required on some system for the compiler - vars = {} - for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'msvc90': - env['MSVS_VERSION']='9.0' - # Scons 1.2 fails to detect the correct location of the platform SDK. - # So we propagate those from the environment. This requires that the - # user run vcvars32.bat before compiling. 
- if 'INCLUDE' in os.environ: - env['ENV']['INCLUDE'] = os.environ['INCLUDE'] - if 'LIB' in os.environ: - env['ENV']['LIB'] = os.environ['LIB'] - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - 
target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/test-0.6.0/devtools/__init__.py b/tags/jsoncpp/test-0.6.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/devtools/antglob.py b/tags/jsoncpp/test-0.6.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. 
- Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/test-0.6.0/devtools/fixeol.py b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript 
**/wscript_build', -## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py deleted file mode 100644 index 03e0467..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Updates the license text in source file. -""" - -# An existing license is found if the file starts with the string below, -# and ends with the first blank line. -LICENSE_BEGIN = "// Copyright " - -BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -""".replace('\r\n','\n') - -def update_license( path, dry_run, show_diff ): - """Update the license statement in the specified file. - Parameters: - path: path of the C++ source file to update. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - with open( path, 'rt' ) as fin: - original_text = fin.read().replace('\r\n','\n') - newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith( LICENSE_BEGIN ): - # No existing license found => prepend it - new_text = BRIEF_LICENSE + original_text - else: - license_end_index = original_text.index( '\n\n' ) # search first blank line - new_text = BRIEF_LICENSE + original_text[license_end_index+2:] - if original_text != new_text: - if not dry_run: - with open( path, 'wb' ) as fout: - fout.write( new_text.replace('\n', newline ) ) - print 'Updated', path - if show_diff: - import difflib - print '\n'.join( difflib.unified_diff( original_text.split('\n'), - new_text.split('\n') ) ) - return True - return False - -def update_license_in_source_directories( source_dirs, dry_run, show_diff ): - """Updates license text in C++ source files found in directory source_dirs. - Parameters: - source_dirs: list of directory to scan for C++ sources. Directories are - scanned recursively. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - from devtools import antglob - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - for source_dir in source_dirs: - cpp_sources = antglob.glob( source_dir, - includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs ) - for source in cpp_sources: - update_license( source, dry_run, show_diff ) - -def main(): - usage = """%prog DIR [DIR2...] 
-Updates license text in sources of the project in source files found -in the directory specified on the command-line. - -Example of call: -python devtools\licenseupdater.py include src -n --diff -=> Show change that would be made to the sources. - -python devtools\licenseupdater.py include src -=> Update license statement on all sources in directories include/ and src/. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, - help="""Only show what files are updated, do not update the files""") - parser.add_option('--diff', dest="show_diff", action='store_true', default=False, - help="""On update, show change made to the file.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - update_license_in_source_directories( args, options.dry_run, options.show_diff ) - print 'Done' - -if __name__ == '__main__': - import sys - import os.path - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - main() - diff --git a/tags/jsoncpp/test-0.6.0/devtools/tarball.py b/tags/jsoncpp/test-0.6.0/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! 
- tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/test-0.6.0/doc/doxyfile.in b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. 
- -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. 
If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match function declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); vs. -# func(std::string) {}). This also makes the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache used to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penalty. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will roughly double the -# memory usage.
The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. 
This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. 
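To make the list-related tags above concrete, here is a minimal, hypothetical C++ sketch (not taken from the JsonCpp sources; the class and member names are invented) of the Doxygen commands that GENERATE_TODOLIST, GENERATE_DEPRECATEDLIST and the \showinitializer / MAX_INITIALIZER_LINES pair act on:

    /// \brief Illustrative scratch buffer (hypothetical; not part of JsonCpp).
    /// \deprecated Collected into the deprecated list because GENERATE_DEPRECATEDLIST = YES.
    /// \todo Collected into the todo list because GENERATE_TODOLIST = YES.
    class ScratchBuffer
    {
    public:
       /// \brief Default capacity.
       /// \showinitializer
       static const int defaultCapacity = 256;   // initializer shown regardless of MAX_INITIALIZER_LINES
    };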
- -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command <command> <input-file>, where <command> is the value of -# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. To create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# The WARN_NO_PARAMDOC option can be enabled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce.
The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). 
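As a purely illustrative sketch of the \include mechanism that EXAMPLE_PATH feeds (EXAMPLE_PATH is left blank in this doxyfile, and the file name parse_config_example.cpp below is a hypothetical placeholder, not a file in the project):

    #include <string>

    /// \brief Loads the application configuration from a JSON document.
    ///
    /// Typical usage (the snippet would be pulled in from a file located via EXAMPLE_PATH):
    /// \include parse_config_example.cpp
    bool loadConfiguration( const std::string &document );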
- -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain images that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command <filter> <input-file>, where -# <filter> is the value of the INPUT_FILTER tag, and <input-file> is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation.
- -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. 
- -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. 
- -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. 
Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
- -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. 
- -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. 
- -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). 
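Looking back at the PREDEFINED and MACRO_EXPANSION settings above: a hedged sketch of how such a predefined macro is typically used to keep implementation details out of the generated documentation. The snippet is loosely modeled on the JsonCpp headers but written from memory for illustration only; treat the exact members as assumptions.

    // Because JSONCPP_DOC_EXCLUDE_IMPLEMENTATION is listed in PREDEFINED, doxygen's
    // preprocessor treats it as defined and drops the guarded section below, so
    // implementation-only members never show up in the generated documentation.
    namespace Json {
       class Value
       {
       public:
          bool isMember( const char *key ) const;   // documented: part of the public API
    #ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
       private:
          struct CommentInfo;                       // hidden from the generated docs
    #endif
       };
    } // namespace Json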
- -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. 
- -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. 
Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/test-0.6.0/doc/footer.html b/tags/jsoncpp/test-0.6.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
-[footer.html body: HTML markup lost in extraction; it showed the SourceForge logo ("hosts this site") and a "Send comments to: Json-cpp Developers" mail link]
diff --git a/tags/jsoncpp/test-0.6.0/doc/header.html b/tags/jsoncpp/test-0.6.0/doc/header.html deleted file mode 100644 index 1a6ad61..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@
-[header.html body: HTML markup lost in extraction; it carried the page title "JsonCpp - JSON data format manipulation library" and links to the JsonCpp project page and the JsonCpp home page]
diff --git a/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox b/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox deleted file mode 100644 index 97cc108..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox +++ /dev/null @@ -1,126 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integers, real numbers, strings, an ordered sequence of values, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space": true } -} -\endverbatim - -\section _features Features -- read and write JSON documents -- attach C and C++ style comments to elements during parsing -- rewrite JSON documents preserving the original comments - -Note: comments used to be supported in JSON but were removed for -portability (C-like comments are not supported in Python). Since -comments are useful in configuration/input files, this feature was -preserved. - -\section _example Code example - -\code -Json::Value root; // will contain the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and its location in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormattedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'plug-ins', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown, to make the new configuration document: -// Since Json::Value has an implicit constructor for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _pbuild Build instructions -The build instructions are located in the file -README.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _pdownload Download -The sources can be downloaded from the -SourceForge download page. - -The latest version of the source is available in the project's subversion repository: - -http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ - -To check out the source, see the following -instructions. - -\section _news What's New?
-The description of latest changes can be found in -NEWS.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest NEWS.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -See file LICENSE in the top-directory of the project. - -Basically JsonCpp is licensed under MIT license, or public domain if desired -and recognized in your jurisdiction. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/test-0.6.0/doc/readme.txt b/tags/jsoncpp/test-0.6.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/test-0.6.0/doc/roadmap.dox b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox deleted file mode 100644 index 1ec0ab6..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/roadmap.dox +++ /dev/null @@ -1,45 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_64bits Adds support for 64 bits integer - There has been many request to add support for 64 bits integer. Use case for this are: - - time is nowdays commonly represented with a 64 bits integer - - 64 bits integer are frequently used as primary key id in many systems - - Plans to add support is: - - must be optional, a configuration option since not all platforms provides 64 bits integer types. - - move definition of Int and UInt from forwards.h to config.h, with the required platform magic. - - C++ defines no standard to define 64 bits integer. Rely on msvc extension, and long long type that - is widely supported. - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_writer Writter control - Provides more control to determine how specific items are serialized when JSON allow choice: - - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". - - Optionally allow escaping of "/" using "\/". - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. 
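The strict mode sketched in the roadmap above corresponds to the Features configuration declared later in this patch (include/json/features.h): a Reader constructed with Features::strictMode() rejects comments and requires the root to be an array or an object. A minimal sketch, assuming the headers are installed as <json/json.h> and using an inline sample document:

\code
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   const std::string doc = "{ \"ids\": [ 1, 2, 3 ] }";

   // strictMode(): comments are forbidden, root must be an array or an object.
   Json::Features features = Json::Features::strictMode();
   Json::Reader reader( features );

   Json::Value root;
   if ( !reader.parse( doc, root ) )
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root["ids"].size() << std::endl;   // prints 3
   return 0;
}
\endcode

The individual flags (allowComments_, strictRoot_) declared on Features can also be toggled directly before the object is passed to the Reader constructor.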
- \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/test-0.6.0/doxybuild.py b/tags/jsoncpp/test-0.6.0/doxybuild.py deleted file mode 100644 index 03ad68d..0000000 --- a/tags/jsoncpp/test-0.6.0/doxybuild.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Script to generate doxygen documentation. -""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'LICENSE', - 'NEWS.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/include/json/autolink.h b/tags/jsoncpp/test-0.6.0/include/json/autolink.h deleted file mode 100644 index 02328d1..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/autolink.h +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/config.h b/tags/jsoncpp/test-0.6.0/include/json/config.h deleted file mode 100644 index 24991d5..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/config.h +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -/// If defined, indicates that the source file is amalgated -/// to prevent private header inclusion. -/// Remarks: it is automatically defined in the generated amalgated header. -// #define JSON_IS_AMALGATED - - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer -// Storages, and 64 bits integer support is disabled. -// #define JSON_NO_INT64 1 - -#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 -// Microsoft Visual Studio 6 only support conversion from __int64 to double -// (no conversion from unsigned __int64). -#define JSON_USE_INT64_DOUBLE_CONVERSION 1 -#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 - -#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 -/// Indicates that the following function is deprecated. -# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) -#endif - -#if !defined(JSONCPP_DEPRECATED) -# define JSONCPP_DEPRECATED(message) -#endif // if !defined(JSONCPP_DEPRECATED) - -namespace Json { - typedef int Int; - typedef unsigned int UInt; -# if defined(JSON_NO_INT64) - typedef int LargestInt; - typedef unsigned int LargestUInt; -# undef JSON_HAS_INT64 -# else // if defined(JSON_NO_INT64) - // For Microsoft Visual use specific types as long long is not supported -# if defined(_MSC_VER) // Microsoft Visual Studio - typedef __int64 Int64; - typedef unsigned __int64 UInt64; -# else // if defined(_MSC_VER) // Other platforms, use long long - typedef long long int Int64; - typedef unsigned long long int UInt64; -# endif // if defined(_MSC_VER) - typedef Int64 LargestInt; - typedef UInt64 LargestUInt; -# define JSON_HAS_INT64 -# endif // if defined(JSON_NO_INT64) -} // end namespace Json - - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/features.h b/tags/jsoncpp/test-0.6.0/include/json/features.h deleted file mode 100644 index 0b53db1..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/features.h +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. 
- * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/forwards.h b/tags/jsoncpp/test-0.6.0/include/json/forwards.h deleted file mode 100644 index 083d44f..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/forwards.h +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "config.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef unsigned int ArrayIndex; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/json.h b/tags/jsoncpp/test-0.6.0/include/json/json.h deleted file mode 100644 index da5fc96..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/json.h +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/reader.h b/tags/jsoncpp/test-0.6.0/include/json/reader.h deleted file mode 100644 index 13de15c..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/reader.h +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "features.h" -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. 
- * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. - * \deprecated Use getFormattedErrorMessages() instead (typo fix). - */ - JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") - std::string getFormatedErrorMessages() const; - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
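A minimal sketch of the std::istream overload of parse() together with getFormattedErrorMessages() described here, discarding comments on input; the file name is purely illustrative:

\code
#include <json/json.h>
#include <fstream>
#include <iostream>

int main()
{
   std::ifstream in( "config.json" );   // hypothetical input file
   Json::Value root;
   Json::Reader reader;
   // Third argument selects whether comments are collected (true) or discarded (false).
   if ( !reader.parse( in, root, false ) )
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root;                   // written back via the styled stream writer
   return 0;
}
\endcode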
- */ - std::string getFormattedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/value.h b/tags/jsoncpp/test-0.6.0/include/json/value.h deleted file mode 100644 index 66821ab..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/value.h +++ /dev/null @@ -1,1103 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. - */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
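The access rules just described (members and array elements created on demand by the non-const operator[], defaults via get(), automatic array growth) in a short sketch; the member names are illustrative, and the 64-bit assignment assumes JSON_HAS_INT64 is defined by config.h (the default unless JSON_NO_INT64 is set):

\code
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   Json::Value root( Json::objectValue );
   root["name"] = "jsoncpp";                      // implicit Value construction from const char*
   root["indent"]["length"] = 3;                  // intermediate object created on demand
   root["timestamp"] = Json::Value::Int64( 1315071600000LL );  // 64-bit storage

   root["plug-ins"] = Json::Value( Json::arrayValue );
   root["plug-ins"].append( "python" );
   root["plug-ins"][2u] = "ruby";                 // index 1 becomes null, array grows to size 3

   // get() returns the default when the member is missing, without creating it.
   std::string encoding = root.get( "encoding", "UTF-8" ).asString();

   // Enumerate object member names.
   Json::Value::Members members = root.getMemberNames();
   for ( Json::Value::Members::const_iterator it = members.begin(); it != members.end(); ++it )
      std::cout << *it << "\n";

   std::cout << encoding << "\n";
   return 0;
}
\endcode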
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; -# if defined(JSON_HAS_INT64) - typedef Json::UInt64 UInt64; - typedef Json::Int64 Int64; -#endif // defined(JSON_HAS_INT64) - typedef Json::LargestInt LargestInt; - typedef Json::LargestUInt LargestUInt; - typedef Json::ArrayIndex ArrayIndex; - - static const Value null; - /// Minimum signed integer value that can be stored in a Json::Value. - static const LargestInt minLargestInt; - /// Maximum signed integer value that can be stored in a Json::Value. - static const LargestInt maxLargestInt; - /// Maximum unsigned integer value that can be stored in a Json::Value. - static const LargestUInt maxLargestUInt; - - /// Minimum signed int value that can be stored in a Json::Value. - static const Int minInt; - /// Maximum signed int value that can be stored in a Json::Value. - static const Int maxInt; - /// Maximum unsigned int value that can be stored in a Json::Value. - static const UInt maxUInt; - - /// Minimum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 minInt64; - /// Maximum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 maxInt64; - /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. - static const UInt64 maxUInt64; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( ArrayIndex index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - ArrayIndex index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - ArrayIndex index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); -#if defined(JSON_HAS_INT64) - Value( Int64 value ); - Value( UInt64 value ); -#endif // if defined(JSON_HAS_INT64) - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. 
The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. - void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - Int64 asInt64() const; - UInt64 asUInt64() const; - LargestInt asLargestInt() const; - LargestUInt asLargestUInt() const; - float asFloat() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - ArrayIndex size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( ArrayIndex size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( ArrayIndex index ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( int index ); - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( ArrayIndex index ) const; - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) 
- const Value &operator[]( int index ) const; - - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( ArrayIndex index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( ArrayIndex index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. - * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... 
*/ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - LargestInt int_; - LargestUInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( ArrayIndex index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - ArrayIndex index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. 
- Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. - * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Experimental: do not use. Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. 
- * \param indexes [input] pointer on the current index. May be \c NULL. - * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/writer.h b/tags/jsoncpp/test-0.6.0/include/json/writer.h deleted file mode 100644 index cb0bd9b..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/writer.h +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. 
-    *
-    * The rules for line break and indent are as follows:
-    * - Object value:
-    *     - if empty then print {} without indent and line break
-    *     - if not empty then print '{', line break & indent, print one value per line
-    *       and then unindent and line break and print '}'.
-    * - Array value:
-    *     - if empty then print [] without indent and line break
-    *     - if the array contains no object value, empty array or some other value types,
-    *       and all the values fit on one line, then print the array on a single line.
-    *     - otherwise, if the values do not fit on one line, or the array contains
-    *       an object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments then they are output according to their #CommentPlacement.
-    *
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledWriter: public Writer
-   {
-   public:
-      StyledWriter();
-      virtual ~StyledWriter(){}
-
-   public: // overridden from Writer
-      /** \brief Serialize a Value in JSON format.
-       * \param root Value to serialize.
-       * \return String containing the JSON document that represents the root value.
-       */
-      virtual std::string write( const Value &root );
-
-   private:
-      void writeValue( const Value &value );
-      void writeArrayValue( const Value &value );
-      bool isMultineArray( const Value &value );
-      void pushValue( const std::string &value );
-      void writeIndent();
-      void writeWithIndent( const std::string &value );
-      void indent();
-      void unindent();
-      void writeCommentBeforeValue( const Value &root );
-      void writeCommentAfterValueOnSameLine( const Value &root );
-      bool hasCommentForValue( const Value &value );
-      static std::string normalizeEOL( const std::string &text );
-
-      typedef std::vector<std::string> ChildValues;
-
-      ChildValues childValues_;
-      std::string document_;
-      std::string indentString_;
-      int rightMargin_;
-      int indentSize_;
-      bool addChildValues_;
-   };
-
-   /** \brief Writes a Value in JSON format in a human friendly way,
-    *     to a stream rather than to a string.
-    *
-    * The rules for line break and indent are as follows:
-    * - Object value:
-    *     - if empty then print {} without indent and line break
-    *     - if not empty then print '{', line break & indent, print one value per line
-    *       and then unindent and line break and print '}'.
-    * - Array value:
-    *     - if empty then print [] without indent and line break
-    *     - if the array contains no object value, empty array or some other value types,
-    *       and all the values fit on one line, then print the array on a single line.
-    *     - otherwise, if the values do not fit on one line, or the array contains
-    *       an object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments then they are output according to their #CommentPlacement.
-    *
-    * \param indentation Each level will be indented by this amount extra.
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledStreamWriter
-   {
-   public:
-      StyledStreamWriter( std::string indentation="\t" );
-      ~StyledStreamWriter(){}
-
-   public:
-      /** \brief Serialize a Value in JSON format.
-       * \param out Stream to write to. (Can be an ostringstream, e.g.)
-       * \param root Value to serialize.
-       * \note There is no point in deriving from Writer, since write() should not return a value.
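The three writer classes declared in this header are typically driven as in the sketch below. It is a minimal illustration, not part of writer.h itself: it assumes the Json::Value interface from value.h (operator[], append()) and the umbrella header json/json.h, and simply feeds the same root value to each writer.

    #include <json/json.h>
    #include <fstream>
    #include <iostream>

    int main()
    {
       Json::Value root;                          // becomes an objectValue on first operator[]
       root["name"] = "jsoncpp";
       root["version"] = 0.6;
       root["tags"].append( "json" );             // becomes an arrayValue

       Json::FastWriter fast;                     // single line, machine oriented
       std::string compact = fast.write( root );

       Json::StyledWriter styled;                 // indented, human friendly
       std::string pretty = styled.write( root );

       std::ofstream file( "root.json" );
       Json::StyledStreamWriter streamWriter( "  " );  // two-space indentation per level
       streamWriter.write( file, root );          // writes directly to the stream

       std::cout << compact << pretty;
       return 0;
    }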
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - -# if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); -# endif // if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( LargestInt value ); - std::string JSON_API valueToString( LargestUInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/test-0.6.0/makerelease.py b/tags/jsoncpp/test-0.6.0/makerelease.py deleted file mode 100644 index a6e330e..0000000 --- a/tags/jsoncpp/test-0.6.0/makerelease.py +++ /dev/null @@ -1,380 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. 
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev - -When testing this script: -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball -import amalgate - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned' and path != 'version': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_commit( message ): - """Commit the sandbox, providing the specified comment. - """ - svn_command( 'ci', '-m', message ) - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 2: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - next_version = args[1] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - svn_commit( 'Release ' + release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir - print 'Generating amalgated source tarball to', amalgated_tarball_path - amalgated_dir = 'dist/amalgated' - amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) - amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version - tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], - amalgated_dir, prefix_dir=amalgated_source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - 
all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Source and doc release tarballs uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - - # Set next version number and commit - set_version( next_version ) - svn_commit( 'Released ' + release_version ) - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/targz.py b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp deleted file mode 100644 index dfb6150..0000000 --- a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -/* This executable is used for testing parser/writer using real JSON files. - */ - - -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormattedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - - -static void -printConfig() -{ - // Print the configuration used to compile JsonCpp -#if defined(JSON_NO_INT64) - printf( "JSON_NO_INT64=1\n" ); -#else - printf( "JSON_NO_INT64=0\n" ); -#endif -} - - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( std::string(argv[1]) == "--json-config" ) - { - printConfig(); - return 3; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - try - { - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - } - catch ( const std::exception &e ) - { - printf( "Unhandled exception:\n%s\n", e.what() ); - exitCode = 1; - } - - return exitCode; -} - diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 173e2ed..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 3a532ad..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,456 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - 
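The newArray()/newArrayCopy()/destructArray() methods above follow the lifecycle that json_batchallocator.h prescribes: take raw storage from the batch allocator, construct the object with the in-place (placement) new operator, and later destroy it explicitly before handing the storage back. A minimal self-contained sketch of that idiom, with plain malloc/free standing in for BatchAllocator and a hypothetical Widget type used only for illustration:

    #include <cstdlib>
    #include <new>

    struct Widget
    {
       Widget() : x( 0 ) {}
       ~Widget() {}
       int x;
    };

    int main()
    {
       // 1. obtain raw, uninitialized storage (BatchAllocator::allocate() plays this role)
       void *raw = std::malloc( sizeof(Widget) );
       if ( !raw )
          return 1;
       // 2. construct the object in place; this is the "in-place new" the allocator requires
       Widget *w = new (raw) Widget();
       w->x = 42;
       // 3. destroy explicitly, then give the storage back (BatchAllocator::release())
       w->~Widget();
       std::free( raw );
       return 0;
    }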
virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). - } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - 
"ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index f2fa160..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 7c594e2..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,880 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - -Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. 
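
A minimal sketch of calling the stream overload above, assuming the public headers are reachable as <json/json.h> (the include path and the sample document are illustrative, not taken from this hunk):

#include <json/json.h>
#include <sstream>
#include <iostream>

int main()
{
   std::istringstream sin( "{ \"name\": \"jsoncpp\", \"release\": 0.6 }" );
   Json::Value root;
   Json::Reader reader;
   // The istream overload slurps the whole stream into a std::string and
   // forwards it to parse( const char *beginDoc, const char *endDoc, ... ).
   if ( !reader.parse( sin, root, /*collectComments=*/true ) )
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root["name"].asString() << std::endl;
   return 0;
}
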
- std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = 
false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); 
- } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ != tokenArraySeparator && - token.type_ != tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - // Attempts to parse the number as an integer. If the number is - // larger than the maximum supported value of an integer then - // we decode the number as a double. - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) - : Value::maxLargestUInt; - Value::LargestUInt threshold = maxIntegerValue / 10; - Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); - assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); - Value::LargestUInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - Value::UInt digit(c - '0'); - if ( value >= threshold ) - { - // If the current digit is not the last one, or if it is - // greater than the last digit of the maximum integer value, - // the parse the number as a double. 
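
As a worked example of the overflow guard (figures added for illustration, assuming a 64-bit LargestUInt in the non-negative case): maxLargestUInt is 18446744073709551615, so threshold works out to 1844674407370955161 and lastDigitThreshold to 5; the literal "18446744073709551615" is therefore still accepted as an integer, while "18446744073709551616" falls through to decodeDouble().
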
- if ( current != token.end_ || digit > lastDigitThreshold ) - { - return decodeDouble( token ); - } - } - value = value * 10 + digit; - } - if ( isNegative ) - currentValue() = -Value::LargestInt( value ); - else if ( value <= Value::LargestUInt(Value::maxInt) ) - currentValue() = Value::LargestInt( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize+1]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && 
c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -// Deprecated. Preserved for backward compatibility -std::string -Reader::getFormatedErrorMessages() const -{ - return getFormattedErrorMessages(); -} - - -std::string -Reader::getFormattedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h deleted file mode 100644 index 658031b..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED -# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED - -/* This header provides common string manipulation support, such as UTF-8, - * portable conversion from/to string... - * - * It is an internal header that must not be exposed. - */ - -namespace Json { - -/// Converts a unicode code-point to UTF-8. -static inline std::string -codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -/// Returns true if ch is a control character (in range [0,32[). -static inline bool -isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - - -enum { - /// Constant that specify the size of the buffer that must be passed to uintToString. - uintToStringBufferSize = 3*sizeof(LargestUInt)+1 -}; - -// Defines a char buffer for use with uintToString(). -typedef char UIntToStringBuffer[uintToStringBufferSize]; - - -/** Converts an unsigned integer to string. - * @param value Unsigned interger to convert to string - * @param current Input/Output string buffer. - * Must have at least uintToStringBufferSize chars free. - */ -static inline void -uintToString( LargestUInt value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = char(value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -} // namespace Json { - -#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp deleted file mode 100644 index c810417..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1847 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
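
Two worked examples for the Unicode helpers shown above (figures added for illustration): decodeUnicodeCodePoint() combines the surrogate pair \uD83D \uDE00 as 0x10000 + ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF) = 0x1F600, and codePointToUTF8() then emits that code point as the four bytes F0 9F 98 80; the three-byte branch maps U+20AC to E2 82 AC.
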
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); -const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); -const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); -const UInt64 Value::maxUInt64 = UInt64(-1); -const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); -const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); -const LargestUInt Value::maxLargestUInt = LargestUInt(-1); - - -/// Unknown size marker -enum { unknown = (unsigned)-1 }; - - -/** Duplicates the specified string value. - * @param value Pointer to the string to duplicate. Must be zero-terminated if - * length is "unknown". - * @param length Length of the value. if equals to unknown, then it will be - * computed using strlen(value). - * @return Pointer on the duplicate instance of string. - */ -static inline char * -duplicateStringValue( const char *value, - unsigned int length = unknown ) -{ - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; -} - - -/** Free the string duplicated by duplicateStringValue(). - */ -static inline void -releaseStringValue( char *value ) -{ - if ( value ) - free( value ); -} - -} // namespace Json - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
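
The integer limits above are built from unsigned wrap-around and bit tricks; a small stand-alone check of the idea, assuming a 32-bit int on a two's-complement platform (the typedefs are stand-ins for the Value::Int/UInt typedefs, not taken from this hunk):

#include <cassert>
#include <limits>

int main()
{
   typedef int Int;
   typedef unsigned int UInt;
   // UInt(-1) is all ones; dividing by two clears the top bit, which gives the
   // bit pattern of the largest Int, and complementing that leaves only the
   // sign bit, i.e. the smallest Int.
   assert( Int( UInt(-1)/2 )    == std::numeric_limits<Int>::max() );
   assert( Int( ~(UInt(-1)/2) ) == std::numeric_limits<Int>::min() );
   return 0;
}
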
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#if !defined(JSON_IS_AMALGATED) -# ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -# endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - releaseStringValue( comment_ ); - JSON_ASSERT( text != 0 ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( ArrayIndex index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? duplicateStringValue(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? duplicateStringValue( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - releaseStringValue( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -ArrayIndex -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -#if defined(JSON_HAS_INT64) -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - -#endif // if defined(JSON_HAS_INT64) - - -Value::Value( Int64 value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt64 value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = 
duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( beginValue, - (unsigned int)(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const 
Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); - return Int(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); - return Int(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); - return UInt(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); - return UInt(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -# if defined(JSON_HAS_INT64) - -Value::Int64 -Value::asInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt64 -Value::asUInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} -# endif // if defined(JSON_HAS_INT64) - - -LargestInt -Value::asLargestInt() const -{ -#if defined(JSON_NO_INT64) - return asInt(); -#else - return asInt64(); -#endif -} - - -LargestUInt -Value::asLargestUInt() const -{ -#if defined(JSON_NO_INT64) - return asUInt(); -#else - return asUInt64(); -#endif -} - - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -float -Value::asFloat() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0f; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return static_cast( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1.0f : 0.0f; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0.0f; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -ArrayIndex -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return ArrayIndex( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( ArrayIndex 
newSize ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ArrayIndex oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( ArrayIndex index = newSize; index < oldSize; ++index ) - { - value_.map_->erase( index ); - } - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( ArrayIndex index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -Value & -Value::operator[]( int index ) -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -const Value & -Value::operator[]( ArrayIndex index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -const Value & -Value::operator[]( int index ) const -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( ArrayIndex index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( ArrayIndex index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( ArrayIndex index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - ArrayIndex index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + ArrayIndex(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
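// Illustrative sketch of the path mini-language parsed by Path::makePath()
// above: '.' separates object member names, "[N]" selects an array index,
// and a '%' token is filled in from the extra PathArgument parameters.
// The JSON layout, key names and values below are made up for the example,
// and the Path constructor is assumed to default its unused arguments:
//
//   Json::Value root;   // e.g. { "servers" : [ { "host" : "a", "port" : 8080 } ] }
//   Json::Path path( "servers[0].port" );
//   const Json::Value &port = path.resolve( root );               // -> 8080
//   Json::Path byKey( "servers[0].%", Json::PathArgument( "host" ) );
//   const Json::Value &host = byKey.resolve( root );              // -> "a"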
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 7457ca3..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if 
( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp deleted file mode 100644 index 8c4c180..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,838 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} - - -std::string valueToString( LargestInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( LargestUInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( LargestUInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -#if defined(JSON_HAS_INT64) - -std::string valueToString( Int value ) -{ - return valueToString( LargestInt(value) ); -} - - -std::string valueToString( UInt value ) -{ - return valueToString( LargestUInt(value) ); -} - -#endif // # if defined(JSON_HAS_INT64) - - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asLargestInt() ); - break; - case uintValue: - document_ += valueToString( value.asLargestUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - 
lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
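// Illustrative sketch of how the writers implemented in this file are
// typically driven. The JSON content and the indent string are made up for
// the example, and <json/writer.h> plus <iostream> are assumed included:
//
//   Json::Value root;
//   root["name"] = "example";
//   root["sizes"].append( 1 );
//   root["sizes"].append( 2 );
//
//   Json::FastWriter fast;                       // compact, single line
//   std::string compact = fast.write( root );
//
//   Json::StyledWriter styled;                   // indented, returned as std::string
//   std::string pretty = styled.write( root );
//
//   Json::StyledStreamWriter streamed( "  " );   // indented, written to a std::ostream
//   streamed.write( std::cout, root );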
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index 02e7b21..0000000 --- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,608 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 0d07238..0000000 --- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). 
- PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. - TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. 
-      int runCommandLine( int argc, const char *argv[] ) const;
-
-      /// Runs all the test cases
-      bool runAllTest( bool printSummary ) const;
-
-      /// Returns the number of test cases in the suite
-      unsigned int testCount() const;
-
-      /// Returns the name of the test case at the specified index
-      std::string testNameAt( unsigned int index ) const;
-
-      /// Runs the test case at the specified index using the specified TestResult
-      void runTestAt( unsigned int index, TestResult &result ) const;
-
-      static void printUsage( const char *appName );
-
-   private: // prevents copy construction and assignment
-      Runner( const Runner &other );
-      Runner &operator =( const Runner &other );
-
-   private:
-      void listTests() const;
-      bool testIndex( const std::string &testName, unsigned int &index ) const;
-      static void preventDialogOnCrash();
-
-   private:
-      typedef std::deque<TestCaseFactory> Factories;
-      Factories tests_;
-   };
-
-   template <typename T>
-   TestResult &
-   checkEqual( TestResult &result, const T &expected, const T &actual,
-               const char *file, unsigned int line, const char *expr )
-   {
-      if ( expected != actual )
-      {
-         result.addFailure( file, line, expr );
-         result << "Expected: " << expected << "\n";
-         result << "Actual  : " << actual;
-      }
-      return result;
-   }
-
-   TestResult &
-   checkStringEqual( TestResult &result,
-                     const std::string &expected, const std::string &actual,
-                     const char *file, unsigned int line, const char *expr );
-
-} // namespace JsonTest
-
-
-/// \brief Asserts that the given expression is true.
-/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
-/// JSONTEST_ASSERT( x == y );
-#define JSONTEST_ASSERT( expr ) \
-   if ( expr ) \
-   { \
-   } \
-   else \
-      result_->addFailure( __FILE__, __LINE__, #expr )
-
-/// \brief Asserts that the given predicate is true.
-/// The predicate may do other assertions and be a member function of the fixture.
-#define JSONTEST_ASSERT_PRED( expr ) \
-   { \
-      JsonTest::PredicateContext _minitest_Context = { \
-         result_->predicateId_, __FILE__, __LINE__, #expr }; \
-      result_->predicateStackTail_->next_ = &_minitest_Context; \
-      result_->predicateId_ += 1; \
-      result_->predicateStackTail_ = &_minitest_Context; \
-      (expr); \
-      result_->popPredicateContext(); \
-   } \
-   *result_
-
-/// \brief Asserts that two values are equal.
-#define JSONTEST_ASSERT_EQUAL( expected, actual ) \
-   JsonTest::checkEqual( *result_, expected, actual, \
-                         __FILE__, __LINE__, \
-                         #expected " == " #actual )
-
-/// \brief Asserts that two string values are equal.
-#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
-   JsonTest::checkStringEqual( *result_, \
-                               std::string(expected), std::string(actual), \
-                               __FILE__, __LINE__, \
-                               #expected " == " #actual )
-
-/// \brief Begin a fixture test case.
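-/// Illustrative usage added by the editor (a minimal sketch, not part of the
-/// original header). ValueTest and runner refer to the fixture struct and the
-/// Runner instance defined in main.cpp later in this patch:
-///
-///    JSONTEST_FIXTURE( ValueTest, isNull )
-///    {
-///       JSONTEST_ASSERT( null_.isNull() );
-///    }
-///    // ...later, inside main():
-///    JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull );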
-#define JSONTEST_FIXTURE( FixtureType, name ) \
-   class Test##FixtureType##name : public FixtureType \
-   { \
-   public: \
-      static JsonTest::TestCase *factory() \
-      { \
-         return new Test##FixtureType##name(); \
-      } \
-   public: /* overridden from TestCase */ \
-      virtual const char *testName() const \
-      { \
-         return #FixtureType "/" #name; \
-      } \
-      virtual void runTestCase(); \
-   }; \
-   \
-   void Test##FixtureType##name::runTestCase()
-
-#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
-   &Test##FixtureType##name::factory
-
-#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
-   (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
-
-#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp
deleted file mode 100644
index de64200..0000000
--- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp
+++ /dev/null
@@ -1,271 +0,0 @@
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#include <json/json.h>
-#include "jsontest.h"
-
-
-// TODO:
-// - boolean values report that they are integral. They should not be.
-// - unsigned integers in the signed integer range are not considered to be valid integers. The range should be checked.
-
-
-// //////////////////////////////////////////////////////////////////
-// //////////////////////////////////////////////////////////////////
-// Json Library test cases
-// //////////////////////////////////////////////////////////////////
-// //////////////////////////////////////////////////////////////////
-
-
-struct ValueTest : JsonTest::TestCase
-{
-   Json::Value null_;
-   Json::Value emptyArray_;
-   Json::Value emptyObject_;
-   Json::Value integer_;
-   Json::Value unsignedInteger_;
-   Json::Value smallUnsignedInteger_;
-   Json::Value real_;
-   Json::Value float_;
-   Json::Value array1_;
-   Json::Value object1_;
-   Json::Value emptyString_;
-   Json::Value string1_;
-   Json::Value string_;
-   Json::Value true_;
-   Json::Value false_;
-
-   ValueTest()
-      : emptyArray_( Json::arrayValue )
-      , emptyObject_( Json::objectValue )
-      , integer_( 123456789 )
-      , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) )
-      , unsignedInteger_( 34567890u )
-      , real_( 1234.56789 )
-      , float_( 0.00390625f )
-      , emptyString_( "" )
-      , string1_( "a" )
-      , string_( "sometext with space" )
-      , true_( true )
-      , false_( false )
-   {
-      array1_.append( 1234 );
-      object1_["id"] = 1234;
-   }
-
-   struct IsCheck
-   {
-      /// Initialize all checks to \c false by default.
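-      /// (Editor's note, not in the original file: checkIs() below compares each
-      /// flag of this struct against the matching Json::Value::is*() accessor, so
-      /// a test sets only the flags it expects to be true and leaves the rest
-      /// defaulted to false.)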
- IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, accessArray ) -{ - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; - - const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; -} - - -JSONTEST_FIXTURE( ValueTest, asFloat ) -{ - JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; -} - -void -ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/test/cleantests.py b/tags/jsoncpp/test-0.6.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json deleted file mode 100644 index 900fcc2..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[ 1 2 3] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 
-.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 -.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected 
b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected deleted file mode 100644 index 
37c1cb1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected deleted file mode 100644 index bc9520a1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json deleted file mode 100644 index 360d660..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -9223372036854775808 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected deleted file mode 100644 index 39eb798..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=-9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json deleted file mode 100644 index 11d8513..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ --9223372036854775808 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected deleted file mode 100644 index 831f432..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=18446744073709551615 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json deleted file mode 100644 index 6e1fb04..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -18446744073709551615 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected 
b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 -.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 
-.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 -.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 
-.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 -.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 
-.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 -.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 
-.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 -.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 
-.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 -.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 
-.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 -.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 
-.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 -.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 
-.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 -.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 
-.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected deleted file mode 100644 index 6ed627a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json deleted file mode 100644 index f0a220f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json +++ /dev/null @@ -1 +0,0 @@ -"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected 
b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json deleted file mode 100644 index e7e1a9e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/generate_expected.py b/tags/jsoncpp/test-0.6.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git 
a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json deleted file mode 100644 index 
8ab0bc4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": 
"abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' 
or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? - file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/test-0.6.0/test/runjsontests.py b/tags/jsoncpp/test-0.6.0/test/runjsontests.py deleted file mode 100644 index ffe8bd5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '<File "%s" is missing: %s>' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - is_json_checker_test = (input_path in test_jsonchecker) or expect_failure - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - 
process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' 
) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/test/rununittests.py b/tags/jsoncpp/test-0.6.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' 
) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/version b/tags/jsoncpp/test-0.6.0/version deleted file mode 100644 index 718dcfc..0000000 --- a/tags/jsoncpp/test-0.6.0/version +++ /dev/null @@ -1 +0,0 @@ -test-0.6.0 \ No newline at end of file From 7ca204c2c490fff1dbbcd48b05d43d6cdb5ff637 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 20:50:44 +0000 Subject: [PATCH 169/268] Release test-0.6.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@169 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/roadmap.dox | 12 +----------- trunk/jsoncpp/include/json/reader.h | 4 +++- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox index 1ec0ab6..c7f14d5 100644 --- a/trunk/jsoncpp/doc/roadmap.dox +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -1,14 +1,4 @@ /*! \page roadmap JsonCpp roadmap - \section ms_64bits Adds support for 64 bits integer - There has been many request to add support for 64 bits integer. Use case for this are: - - time is nowdays commonly represented with a 64 bits integer - - 64 bits integer are frequently used as primary key id in many systems - - Plans to add support is: - - must be optional, a configuration option since not all platforms provides 64 bits integer types. - - move definition of Int and UInt from forwards.h to config.h, with the required platform magic. - - C++ defines no standard to define 64 bits integer. Rely on msvc extension, and long long type that - is widely supported. \section ms_release Makes JsonCpp ready for release - Build system clean-up: - Fix build on Windows (shared-library build is broken) @@ -34,7 +24,7 @@ Some typical use-case involve an application specific structure to/from a JSON document. - Event base parser to allow unserializing a Json document directly in datastructure instead of using the intermediate Json::Value. - - "Stream" based parser to serialized a Json document without using Json::Value as input. + - Stream based parser to serialized a Json document without using Json::Value as input. - Performance oriented parser/writer: - Provides an event based parser. Should allow pulling & skipping events for ease of use. - Provides a JSON document builder: fast only. diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 13de15c..5e4c32a 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -51,7 +51,9 @@ namespace Json { bool collectComments = true ); /** \brief Read a Value from a JSON document. - * \param document UTF-8 encoded string containing the document to read. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. * \param root [out] Contains the root value of the document if it was * successfully parsed. 
* \param collectComments \c true to collect comment and allow writing them back during From 68487593e6a1b565282589d1327c106bfe3e273b Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 20:51:58 +0000 Subject: [PATCH 170/268] Release test-0.6.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@170 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/test-0.6.0/AUTHORS | 1 + tags/jsoncpp/test-0.6.0/LICENSE | 55 + tags/jsoncpp/test-0.6.0/NEWS.txt | 95 + tags/jsoncpp/test-0.6.0/README.txt | 172 ++ tags/jsoncpp/test-0.6.0/SConstruct | 248 ++ tags/jsoncpp/test-0.6.0/devtools/__init__.py | 1 + tags/jsoncpp/test-0.6.0/devtools/antglob.py | 201 ++ tags/jsoncpp/test-0.6.0/devtools/fixeol.py | 63 + .../test-0.6.0/devtools/licenseupdater.py | 93 + tags/jsoncpp/test-0.6.0/devtools/tarball.py | 53 + tags/jsoncpp/test-0.6.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/test-0.6.0/doc/footer.html | 23 + tags/jsoncpp/test-0.6.0/doc/header.html | 24 + tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox | 126 + tags/jsoncpp/test-0.6.0/doc/readme.txt | 1 + tags/jsoncpp/test-0.6.0/doc/roadmap.dox | 35 + tags/jsoncpp/test-0.6.0/doxybuild.py | 169 ++ .../test-0.6.0/include/json/autolink.h | 24 + tags/jsoncpp/test-0.6.0/include/json/config.h | 96 + .../test-0.6.0/include/json/features.h | 49 + .../test-0.6.0/include/json/forwards.h | 44 + tags/jsoncpp/test-0.6.0/include/json/json.h | 15 + tags/jsoncpp/test-0.6.0/include/json/reader.h | 214 ++ tags/jsoncpp/test-0.6.0/include/json/value.h | 1103 +++++++++ tags/jsoncpp/test-0.6.0/include/json/writer.h | 185 ++ .../test-0.6.0/makefiles/vs71/jsoncpp.sln | 46 + .../test-0.6.0/makefiles/vs71/jsontest.vcproj | 119 + .../test-0.6.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/test-0.6.0/makerelease.py | 380 +++ .../test-0.6.0/scons-tools/globtool.py | 53 + .../jsoncpp/test-0.6.0/scons-tools/srcdist.py | 179 ++ .../test-0.6.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/test-0.6.0/scons-tools/targz.py | 82 + .../test-0.6.0/src/jsontestrunner/main.cpp | 269 +++ .../test-0.6.0/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../test-0.6.0/src/lib_json/json_reader.cpp | 880 +++++++ .../test-0.6.0/src/lib_json/json_tool.h | 93 + .../test-0.6.0/src/lib_json/json_value.cpp | 1847 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../test-0.6.0/src/lib_json/json_writer.cpp | 838 +++++++ .../test-0.6.0/src/lib_json/sconscript | 8 + .../test-0.6.0/src/test_lib_json/jsontest.cpp | 608 +++++ .../test-0.6.0/src/test_lib_json/jsontest.h | 259 ++ .../test-0.6.0/src/test_lib_json/main.cpp | 271 +++ .../test-0.6.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/test-0.6.0/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../test-0.6.0/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../test-0.6.0/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../test-0.6.0/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + .../test-0.6.0/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../test-0.6.0/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../test-0.6.0/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../test-0.6.0/test/data/test_basic_01.json | 1 + 
.../test/data/test_basic_02.expected | 1 + .../test-0.6.0/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../test-0.6.0/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../test-0.6.0/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../test-0.6.0/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../test-0.6.0/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../test-0.6.0/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../test-0.6.0/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../test-0.6.0/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../test-0.6.0/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../test-0.6.0/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../test-0.6.0/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../test-0.6.0/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../test-0.6.0/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../test-0.6.0/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../test-0.6.0/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../test-0.6.0/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../test-0.6.0/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../test-0.6.0/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../test-0.6.0/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../test-0.6.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../test/data/test_real_01.expected | 2 + .../test-0.6.0/test/data/test_real_01.json | 3 + .../test/data/test_real_02.expected | 2 + .../test-0.6.0/test/data/test_real_02.json | 3 + .../test/data/test_real_03.expected | 2 + .../test-0.6.0/test/data/test_real_03.json | 3 + .../test/data/test_real_04.expected | 2 + .../test-0.6.0/test/data/test_real_04.json | 3 + .../test/data/test_real_05.expected | 3 + .../test-0.6.0/test/data/test_real_05.json | 3 + .../test/data/test_real_06.expected | 3 + .../test-0.6.0/test/data/test_real_06.json | 3 + .../test/data/test_real_07.expected | 3 + .../test-0.6.0/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../test-0.6.0/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../test-0.6.0/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 + .../test-0.6.0/test/data/test_string_03.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + 
.../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../test-0.6.0/test/generate_expected.py | 11 + .../test-0.6.0/test/jsonchecker/fail1.json | 1 + .../test-0.6.0/test/jsonchecker/fail10.json | 1 + .../test-0.6.0/test/jsonchecker/fail11.json | 1 + .../test-0.6.0/test/jsonchecker/fail12.json | 1 + .../test-0.6.0/test/jsonchecker/fail13.json | 1 + .../test-0.6.0/test/jsonchecker/fail14.json | 1 + .../test-0.6.0/test/jsonchecker/fail15.json | 1 + .../test-0.6.0/test/jsonchecker/fail16.json | 1 + .../test-0.6.0/test/jsonchecker/fail17.json | 1 + .../test-0.6.0/test/jsonchecker/fail18.json | 1 + .../test-0.6.0/test/jsonchecker/fail19.json | 1 + .../test-0.6.0/test/jsonchecker/fail2.json | 1 + .../test-0.6.0/test/jsonchecker/fail20.json | 1 + .../test-0.6.0/test/jsonchecker/fail21.json | 1 + .../test-0.6.0/test/jsonchecker/fail22.json | 1 + .../test-0.6.0/test/jsonchecker/fail23.json | 1 + .../test-0.6.0/test/jsonchecker/fail24.json | 1 + .../test-0.6.0/test/jsonchecker/fail25.json | 1 + .../test-0.6.0/test/jsonchecker/fail26.json | 1 + .../test-0.6.0/test/jsonchecker/fail27.json | 2 + .../test-0.6.0/test/jsonchecker/fail28.json | 2 + .../test-0.6.0/test/jsonchecker/fail29.json | 1 + .../test-0.6.0/test/jsonchecker/fail3.json | 1 + .../test-0.6.0/test/jsonchecker/fail30.json | 1 + .../test-0.6.0/test/jsonchecker/fail31.json | 1 + .../test-0.6.0/test/jsonchecker/fail32.json | 1 + .../test-0.6.0/test/jsonchecker/fail33.json | 1 + .../test-0.6.0/test/jsonchecker/fail4.json | 1 + .../test-0.6.0/test/jsonchecker/fail5.json | 1 + .../test-0.6.0/test/jsonchecker/fail6.json | 1 + .../test-0.6.0/test/jsonchecker/fail7.json | 1 + .../test-0.6.0/test/jsonchecker/fail8.json | 1 + .../test-0.6.0/test/jsonchecker/fail9.json | 1 + .../test-0.6.0/test/jsonchecker/pass1.json | 58 + .../test-0.6.0/test/jsonchecker/pass2.json | 1 + .../test-0.6.0/test/jsonchecker/pass3.json | 6 + .../test-0.6.0/test/jsonchecker/readme.txt | 3 + .../test-0.6.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/test-0.6.0/test/runjsontests.py | 134 ++ tags/jsoncpp/test-0.6.0/test/rununittests.py | 73 + tags/jsoncpp/test-0.6.0/version | 1 + 185 files changed, 15355 insertions(+) create mode 100644 tags/jsoncpp/test-0.6.0/AUTHORS create mode 100644 tags/jsoncpp/test-0.6.0/LICENSE create mode 100644 tags/jsoncpp/test-0.6.0/NEWS.txt create mode 100644 tags/jsoncpp/test-0.6.0/README.txt create mode 100644 tags/jsoncpp/test-0.6.0/SConstruct create mode 100644 tags/jsoncpp/test-0.6.0/devtools/__init__.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/antglob.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/tarball.py create mode 100644 tags/jsoncpp/test-0.6.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/test-0.6.0/doc/footer.html create mode 100644 tags/jsoncpp/test-0.6.0/doc/header.html create mode 100644 tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/test-0.6.0/doc/readme.txt create mode 100644 tags/jsoncpp/test-0.6.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/test-0.6.0/doxybuild.py create mode 100644 tags/jsoncpp/test-0.6.0/include/json/autolink.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/config.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/features.h create mode 100644 
tags/jsoncpp/test-0.6.0/include/json/forwards.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/json.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/reader.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/value.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/writer.h create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makerelease.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/test/cleantests.py create mode 100644 tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json create mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.json create mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/generate_expected.py create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json create mode 100644 
tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/test-0.6.0/test/runjsontests.py create mode 100644 tags/jsoncpp/test-0.6.0/test/rununittests.py create mode 100644 tags/jsoncpp/test-0.6.0/version diff --git a/tags/jsoncpp/test-0.6.0/AUTHORS b/tags/jsoncpp/test-0.6.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/test-0.6.0/LICENSE b/tags/jsoncpp/test-0.6.0/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. 
+ +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/test-0.6.0/NEWS.txt b/tags/jsoncpp/test-0.6.0/NEWS.txt new file mode 100644 index 0000000..7978c0a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/NEWS.txt @@ -0,0 +1,95 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installation. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to setup the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + - Added support for amalgated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgated source and header" + for detail. + +* Value + + - Removed experimental ValueAllocator, it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + + - Added support for 64 bits integer: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64 bits integers on system that support them (based on __int64 on + Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still returns plain + "int" based types, but asserts if an attempt is made to retrieve + a 64 bits value that can not represented as the return type. 
+ + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64 bits integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns + the integer as a LargestInt/LargestUInt respectively. Those functions + functions are typically used when implementing writer. + + The reader attempts to read number as 64 bits integer, and fall back + to reading a double if the number is not in the range of 64 bits + integer. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now returns + long long. This changes break code that was passing the return value + to *printf() function. + + Support for 64 bits integer can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). + + - Array index can be passed as int to operator[], allowing use of literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoid lost of precision warning caused by used of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added test to ensure that the escape sequence "\/" is corrected handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who + helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/test-0.6.0/README.txt b/tags/jsoncpp/test-0.6.0/README.txt new file mode 100644 index 0000000..ba70329 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/README.txt @@ -0,0 +1,172 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON value, handle serialization and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. 
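Returning to the 64-bit integer support described in the NEWS.txt excerpt above: the snippet below is an illustrative sketch only, not part of the patch. It assumes the test-0.6.0 headers (json/json.h) are on the include path and that JSON_NO_INT64 is not defined.

    // Illustrative sketch (not from the patch): the 64-bit Value accessors
    // added in 0.6.0, as described in the NEWS.txt excerpt above.
    #include <json/json.h>
    #include <iostream>

    int main()
    {
        Json::Value value;
        value["id"] = Json::Int64( 123456789012345LL ); // stored as a 64-bit integer
        value["count"] = 42;

        // asInt() asserts when the stored number does not fit in a plain int,
        // so the wide accessors are used here instead.
        Json::Int64 id = value["id"].asInt64();
        Json::LargestInt widest = value["id"].asLargestInt();

        Json::Value array;
        array.append( 1234 );
        int first = array[0].asInt(); // literal index works via Json::ArrayIndex

        std::cout << id << " " << widest << " " << first << std::endl;
        return 0;
    }

A float can likewise be obtained with value["count"].asFloat(), avoiding the precision warning mentioned above for asDouble().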
+ +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +Notes that test can be run by scons using the 'check' target (see above). + +You need to run test manually only if you are troubleshooting an issue. + +In the instruction below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + +Notes that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + +* Generating amalgated source and header + ====================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgated source can be generated at any time by running the following +command from the top-directory (requires python 2.6): + +python amalgate.py + +It is possible to specify header name. See -h options for detail. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that need to be added to your project +- dist/json/json.h: header file corresponding to use in your project. It is +equivalent to including json/json.h in non-amalgated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header the provides forward declaration +of all JsonCpp types. This typically what should be included in headers to +speed-up compilation. + +The amalgated sources are generated by concatenating JsonCpp source in the +correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of +other headers. + +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path. 
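A minimal parse and re-serialize round trip using these headers, of the kind the jsontest runner performs, is sketched below. This is illustrative only and not code from the patch; it assumes the conventional json/json.h header, whose include form the README states just after this sketch.

    // Illustrative sketch (not from the patch): parse a document, report
    // errors, and re-serialize it with the styled writer.
    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main()
    {
        const std::string document = "{ \"name\": \"json\", \"items\": [ 1, 2, 3 ] }";

        Json::Value root;
        Json::Reader reader;
        if ( !reader.parse( document, root ) )
        {
            // getFormattedErrorMessages() is the renamed accessor noted in NEWS.txt.
            std::cerr << reader.getFormattedErrorMessages() << std::endl;
            return 1;
        }

        // The styled writer produces the kind of output stored in *.rewrite files.
        Json::StyledWriter writer;
        std::cout << writer.write( root ) << std::endl;
        return 0;
    }

The *.expected files described below flatten such a document into one path=value line per element (for example, a line such as .items[0]=1), which is what the test runner compares against.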
jsoncpp headers +should be included as follow: + +#include + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/test-0.6.0/SConstruct b/tags/jsoncpp/test-0.6.0/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. 
+ import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. 
+ if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + 
target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/test-0.6.0/devtools/__init__.py b/tags/jsoncpp/test-0.6.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/devtools/antglob.py b/tags/jsoncpp/test-0.6.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. 
+ Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/test-0.6.0/devtools/fixeol.py b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript 
**/wscript_build', +## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/test-0.6.0/devtools/tarball.py b/tags/jsoncpp/test-0.6.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/test-0.6.0/doc/doxyfile.in b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/test-0.6.0/doc/footer.html b/tags/jsoncpp/test-0.6.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/test-0.6.0/doc/header.html b/tags/jsoncpp/test-0.6.0/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox b/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox new file mode 100644 index 0000000..97cc108 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox @@ -0,0 +1,126 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space": true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- attach C and C++ style comments to element during parsing +- rewrite JSON document preserving original comments + +Notes: Comments used to be supported in JSON but where removed for +portability (C like comments are not supported in Python). Since +comments are useful in configuration/input file, this feature was +preserved. + +\section _example Code example + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormattedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To checkout the source, see the following +instructions. + +\section _news What's New? 
+The description of latest changes can be found in +NEWS.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest NEWS.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +See file LICENSE in the top-directory of the project. + +Basically JsonCpp is licensed under MIT license, or public domain if desired +and recognized in your jurisdiction. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/test-0.6.0/doc/readme.txt b/tags/jsoncpp/test-0.6.0/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/test-0.6.0/doc/roadmap.dox b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox new file mode 100644 index 0000000..c7f14d5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox @@ -0,0 +1,35 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_writer Writter control + Provides more control to determine how specific items are serialized when JSON allow choice: + - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". + - Optionally allow escaping of "/" using "\/". + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - Stream based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/test-0.6.0/doxybuild.py b/tags/jsoncpp/test-0.6.0/doxybuild.py new file mode 100644 index 0000000..03ad68d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doxybuild.py @@ -0,0 +1,169 @@ +"""Script to generate doxygen documentation. 
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/include/json/autolink.h b/tags/jsoncpp/test-0.6.0/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/config.h b/tags/jsoncpp/test-0.6.0/include/json/config.h new file mode 100644 index 0000000..24991d5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGATED + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/features.h b/tags/jsoncpp/test-0.6.0/include/json/features.h new file mode 100644 index 0000000..0b53db1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/forwards.h b/tags/jsoncpp/test-0.6.0/include/json/forwards.h new file mode 100644 index 0000000..083d44f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/json.h b/tags/jsoncpp/test-0.6.0/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/reader.h b/tags/jsoncpp/test-0.6.0/include/json/reader.h new file mode 100644 index 0000000..5e4c32a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/reader.h @@ -0,0 +1,214 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/value.h b/tags/jsoncpp/test-0.6.0/include/json/value.h new file mode 100644 index 0000000..66821ab --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
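The behaviours listed above are easiest to see together. A minimal sketch, assuming an objectValue root (valueSketch is only an illustrative name): operator[]() creates a missing member, get() supplies a default without creating anything, and getMemberNames() lists the member names for iteration.

    #include <json/json.h>
    #include <iostream>

    void valueSketch()
    {
       Json::Value root( Json::objectValue );
       root["name"] = "jsoncpp";                      // member is created on first access
       int port = root.get( "port", 8080 ).asInt();   // "port" is absent, the default is used

       Json::Value::Members members = root.getMemberNames();   // here: { "name" }
       for ( Json::Value::Members::size_type i = 0; i < members.size(); ++i )
          std::cout << members[i] << " = " << root[ members[i] ].asString() << "\n";
       std::cout << "port = " << port << "\n";
    }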
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
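As a rough illustration of the index overloads described here, including the 'value[0u]' disambiguation and the automatic growth of an arrayValue (arraySketch is only an illustrative name):

    #include <json/json.h>

    void arraySketch()
    {
       Json::Value list( Json::arrayValue );
       list[0u] = "first";        // 0u selects the ArrayIndex overload, not the const char* one
       list[2]  = "third";        // array grows to size 3, element 1 becomes null
       Json::Value item = list.get( 5u, "fallback" );   // index out of range, default returned
       (void)item;
    }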
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
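The path syntax listed above is easier to read with a concrete call. The class is flagged as experimental in this header, so treat the snippet below purely as a notation sketch (firstPlugIn is only an illustrative name):

    #include <json/json.h>
    #include <string>

    std::string firstPlugIn( const Json::Value &root )
    {
       Json::Path path( ".plug-ins[0]" );               // member "plug-ins", then element 0
       return path.resolve( root, "none" ).asString();  // default when the node is missing
    }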
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
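Worked through for the declared page size of 8, the look-up arithmetic described in the class comment above is:

    unsigned int itemIndex  = 21;
    unsigned int pageIndex  = itemIndex / 8;   // = 2
    unsigned int itemOffset = itemIndex % 8;   // = 5, so the element lives at pages_[2][5]
    // Since 8 is a power of two, the division and modulo reduce to a shift and a mask.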
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/writer.h b/tags/jsoncpp/test-0.6.0/include/json/writer.h new file mode 100644 index 0000000..cb0bd9b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
+ *
+ * The rules for line break and indent are as follows:
+ * - Object value:
+ * - if empty then print {} without indent and line break
+ * - if not empty then print '{', line break & indent, print one value per line
+ * and then unindent and line break and print '}'.
+ * - Array value:
+ * - if empty then print [] without indent and line break
+ * - if the array contains no object value, empty array or some other value types,
+ * and all the values fit on one line, then print the array on a single line.
+ * - otherwise, if the values do not fit on one line, or the array contains
+ * object or non empty array, then print one value per line.
+ *
+ * If the Value has comments, they are output according to their #CommentPlacement.
+ *
+ * \sa Reader, Value, Value::setComment()
+ */
+ class JSON_API StyledWriter: public Writer
+ {
+ public:
+ StyledWriter();
+ virtual ~StyledWriter(){}
+
+ public: // overridden from Writer
+ /** \brief Serialize a Value in JSON format.
+ * \param root Value to serialize.
+ * \return String containing the JSON document that represents the root value.
+ */
+ virtual std::string write( const Value &root );
+
+ private:
+ void writeValue( const Value &value );
+ void writeArrayValue( const Value &value );
+ bool isMultineArray( const Value &value );
+ void pushValue( const std::string &value );
+ void writeIndent();
+ void writeWithIndent( const std::string &value );
+ void indent();
+ void unindent();
+ void writeCommentBeforeValue( const Value &root );
+ void writeCommentAfterValueOnSameLine( const Value &root );
+ bool hasCommentForValue( const Value &value );
+ static std::string normalizeEOL( const std::string &text );
+
+ typedef std::vector ChildValues;
+
+ ChildValues childValues_;
+ std::string document_;
+ std::string indentString_;
+ int rightMargin_;
+ int indentSize_;
+ bool addChildValues_;
+ };
+
+ /** \brief Writes a Value in JSON format in a human friendly way,
+ to a stream rather than to a string.
+ *
+ * The rules for line break and indent are as follows:
+ * - Object value:
+ * - if empty then print {} without indent and line break
+ * - if not empty then print '{', line break & indent, print one value per line
+ * and then unindent and line break and print '}'.
+ * - Array value:
+ * - if empty then print [] without indent and line break
+ * - if the array contains no object value, empty array or some other value types,
+ * and all the values fit on one line, then print the array on a single line.
+ * - otherwise, if the values do not fit on one line, or the array contains
+ * object or non empty array, then print one value per line.
+ *
+ * If the Value has comments, they are output according to their #CommentPlacement.
+ *
+ * \param indentation Each level will be indented by this amount extra.
+ * \sa Reader, Value, Value::setComment()
+ */
+ class JSON_API StyledStreamWriter
+ {
+ public:
+ StyledStreamWriter( std::string indentation="\t" );
+ ~StyledStreamWriter(){}
+
+ public:
+ /** \brief Serialize a Value in JSON format.
+ * \param out Stream to write to. (Can be ostringstream, e.g.)
+ * \param root Value to serialize.
+ * \note There is no point in deriving from Writer, since write() should not return a value.
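 * A minimal usage sketch; the two-space indentation string and the std::cout
 * target are arbitrary example choices:
 * \code
 *   Json::Value root;
 *   root["encoding"] = "UTF-8";
 *   Json::StyledStreamWriter writer( "  " );
 *   writer.write( std::cout, root );   // pretty-prints root to std::cout
 * \endcode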
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makerelease.py b/tags/jsoncpp/test-0.6.0/makerelease.py new file mode 100644 index 0000000..a6e330e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makerelease.py @@ -0,0 +1,380 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir + print 'Generating amalgated source tarball to', amalgated_tarball_path + amalgated_dir = 'dist/amalgated' + amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) + amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version + tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], + amalgated_dir, prefix_dir=amalgated_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + 
all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/targz.py b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
);
+         --(iterator.bucketIndex_);
+      }
+      iterator.link_ = iterator.link_->previous_;
+      iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1;
+   }
+}
+
+
+const char *
+ValueInternalMap::key( const IteratorState &iterator )
+{
+   JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
+   return iterator.link_->keys_[iterator.itemIndex_];
+}
+
+const char *
+ValueInternalMap::key( const IteratorState &iterator, bool &isStatic )
+{
+   JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
+   isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic();
+   return iterator.link_->keys_[iterator.itemIndex_];
+}
+
+
+Value &
+ValueInternalMap::value( const IteratorState &iterator )
+{
+   JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
+   return iterator.link_->items_[iterator.itemIndex_];
+}
+
+
+int
+ValueInternalMap::distance( const IteratorState &x, const IteratorState &y )
+{
+   int offset = 0;
+   IteratorState it = x;
+   while ( !equals( it, y ) )
+   {
+      increment( it );
+      ++offset;
+   }
+   return offset;
+}
+
+} // namespace Json
diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp
new file mode 100644
index 0000000..7c594e2
--- /dev/null
+++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp
@@ -0,0 +1,880 @@
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGATED)
+# include <json/reader.h>
+# include <json/value.h>
+# include "json_tool.h"
+#endif // if !defined(JSON_IS_AMALGATED)
+#include <utility>
+#include <cstdio>
+#include <cassert>
+#include <cstring>
+#include <iostream>
+#include <stdexcept>
+
+#if _MSC_VER >= 1400 // VC++ 8.0
+#pragma warning( disable : 4996 )   // disable warning about strdup being deprecated.
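Before the implementation details, a minimal usage sketch of the Reader defined in this file may help orient the reader. It is an illustration only, not part of the patch, and assumes just the public headers json/reader.h and json/value.h plus the parse() and getFormattedErrorMessages() members implemented below.

#include <json/reader.h>
#include <json/value.h>
#include <iostream>
#include <string>

int main()
{
   const std::string doc = "{ \"name\": \"json\", \"count\": 3 }";
   Json::Value root;
   Json::Reader reader;                       // default Features::all()
   if ( !reader.parse( doc, root, false ) )   // false: do not collect comments
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root["name"].asString() << " " << root["count"].asInt() << std::endl;
   return 0;
}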
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
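+   // Note: (char)EOF is the byte 0xFF on the usual 8-bit-char targets, so the
+   // getline() call below would stop early if the stream ever contained a
+   // 0xFF byte (never the case for valid UTF-8 text).  An alternative sketch
+   // for slurping a whole stream, assuming <string> and <iterator>, is:
+   //    std::string doc( (std::istreambuf_iterator<char>( sin )),
+   //                      std::istreambuf_iterator<char>() );
+   // The copy-on-write remark above also no longer applies to C++11 standard
+   // libraries, where reference-counted std::string is disallowed.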
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); 
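+         // Recovery note: addErrorAndRecover() records one message and then
+         // skips tokens until the next '}' (or the end of the stream), so a
+         // single malformed member does not cascade into spurious errors.
+         // For example, parsing  { "a": 1 "b": 2 }  fails with exactly one
+         // "Missing ',' or '}' in object declaration" error, located at "b".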
+ } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
+ if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; + } + if ( isNegative ) + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize+1]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && 
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
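The surrogate-pair arithmetic in Reader::decodeUnicodeCodePoint() above is easier to check with a concrete value. The following standalone sketch (an illustration, not part of the patch) reproduces the combining step for one pair:

#include <cassert>

int main()
{
   // "\uD834\uDD1E" in a JSON string encodes U+1D11E (MUSICAL SYMBOL G CLEF).
   unsigned int hi = 0xD834;                  // leading (high) surrogate, in [0xD800, 0xDBFF]
   unsigned int lo = 0xDD1E;                  // trailing (low) surrogate
   unsigned int cp = 0x10000 + ((hi & 0x3FF) << 10) + (lo & 0x3FF);
   assert( cp == 0x1D11E );                   // codePointToUTF8(cp) then yields F0 9D 84 9E
   return 0;
}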
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+
+/* This header provides common string manipulation support, such as UTF-8,
+ * portable conversion from/to string...
+ *
+ * It is an internal header that must not be exposed.
+ */
+
+namespace Json {
+
+/// Converts a unicode code-point to UTF-8.
+static inline std::string
+codePointToUTF8(unsigned int cp)
+{
+   std::string result;
+
+   // based on description from http://en.wikipedia.org/wiki/UTF-8
+
+   if (cp <= 0x7f)
+   {
+      result.resize(1);
+      result[0] = static_cast<char>(cp);
+   }
+   else if (cp <= 0x7FF)
+   {
+      result.resize(2);
+      result[1] = static_cast<char>(0x80 | (0x3f & cp));
+      result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
+   }
+   else if (cp <= 0xFFFF)
+   {
+      result.resize(3);
+      result[2] = static_cast<char>(0x80 | (0x3f & cp));
+      result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
+      result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
+   }
+   else if (cp <= 0x10FFFF)
+   {
+      result.resize(4);
+      result[3] = static_cast<char>(0x80 | (0x3f & cp));
+      result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
+      result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
+      result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
+   }
+
+   return result;
+}
+
+
+/// Returns true if ch is a control character (in range [0,32[).
+static inline bool
+isControlCharacter(char ch)
+{
+   return ch > 0 && ch <= 0x1F;
+}
+
+
+enum {
+   /// Constant that specifies the size of the buffer that must be passed to uintToString.
+   uintToStringBufferSize = 3*sizeof(LargestUInt)+1
+};
+
+// Defines a char buffer for use with uintToString().
+typedef char UIntToStringBuffer[uintToStringBufferSize];
+
+
+/** Converts an unsigned integer to string.
+ * @param value Unsigned integer to convert to string
+ * @param current Input/Output string buffer.
+ *        Must have at least uintToStringBufferSize chars free.
+ */
+static inline void
+uintToString( LargestUInt value,
+              char *&current )
+{
+   *--current = 0;
+   do
+   {
+      *--current = char(value % 10) + '0';
+      value /= 10;
+   }
+   while ( value != 0 );
+}
+
+} // namespace Json {
+
+#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED
diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp
new file mode 100644
index 0000000..c810417
--- /dev/null
+++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp
@@ -0,0 +1,1847 @@
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
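As a quick cross-check of the encoder defined in json_tool.h above, the branch boundaries of codePointToUTF8() map to the standard UTF-8 byte counts; the example bytes below can be verified by hand against the masks used there (illustration only):

//   U+0000  .. U+007F    -> 1 byte    e.g. U+0041 'A'  -> 41
//   U+0080  .. U+07FF    -> 2 bytes   e.g. U+00E9      -> C3 A9
//   U+0800  .. U+FFFF    -> 3 bytes   e.g. U+20AC      -> E2 82 AC
//   U+10000 .. U+10FFFF  -> 4 bytes   e.g. U+1D11E     -> F0 9D 84 9E
//   Code points above U+10FFFF match no branch and return an empty string.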
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGATED)
+# include <json/value.h>
+# include <json/writer.h>
+# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
+#  include "json_batchallocator.h"
+# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
+#endif // if !defined(JSON_IS_AMALGATED)
+#include <iostream>
+#include <utility>
+#include <stdexcept>
+#include <cstring>
+#include <cassert>
+#ifdef JSON_USE_CPPTL
+# include <cpptl/conststring.h>
+#endif
+#include <cstddef> // size_t
+
+#define JSON_ASSERT_UNREACHABLE assert( false )
+#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw
+#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message );
+
+namespace Json {
+
+const Value Value::null;
+const Int Value::minInt = Int( ~(UInt(-1)/2) );
+const Int Value::maxInt = Int( UInt(-1)/2 );
+const UInt Value::maxUInt = UInt(-1);
+const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) );
+const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 );
+const UInt64 Value::maxUInt64 = UInt64(-1);
+const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) );
+const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 );
+const LargestUInt Value::maxLargestUInt = LargestUInt(-1);
+
+
+/// Unknown size marker
+enum { unknown = (unsigned)-1 };
+
+
+/** Duplicates the specified string value.
+ * @param value Pointer to the string to duplicate. Must be zero-terminated if
+ *              length is "unknown".
+ * @param length Length of the value. If equal to unknown, then it will be
+ *               computed using strlen(value).
+ * @return Pointer to the duplicated string.
+ */
+static inline char *
+duplicateStringValue( const char *value,
+                      unsigned int length = unknown )
+{
+   if ( length == unknown )
+      length = (unsigned int)strlen(value);
+   char *newString = static_cast<char *>( malloc( length + 1 ) );
+   JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" );
+   memcpy( newString, value, length );
+   newString[length] = 0;
+   return newString;
+}
+
+
+/** Free the string duplicated by duplicateStringValue().
+ */
+static inline void
+releaseStringValue( char *value )
+{
+   if ( value )
+      free( value );
+}
+
+} // namespace Json
+
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// ValueInternals...
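The bit tricks behind the min/max constants above are worth one concrete check. A minimal standalone sketch, assuming the usual two's-complement target with 32-bit int (the typedefs below are hypothetical stand-ins, not part of the patch):

#include <cassert>
#include <climits>

int main()
{
   typedef unsigned int UInt;                     // stand-in for Json::UInt
   typedef int Int;                               // stand-in for Json::Int
   assert( UInt(-1) == 0xFFFFFFFFu );             // all bits set: maxUInt
   assert( Int( UInt(-1)/2 ) == INT_MAX );        // 0x7FFFFFFF: maxInt
   assert( Int( ~(UInt(-1)/2) ) == INT_MIN );     // 0x80000000 reinterpreted: minInt
   return 0;
}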
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGATED) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0;
+   case stringValue:
+   case arrayValue:
+   case objectValue:
+      JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" );
+   default:
+      JSON_ASSERT_UNREACHABLE;
+   }
+   return 0; // unreachable;
+}
+# endif // if defined(JSON_HAS_INT64)
+
+
+LargestInt
+Value::asLargestInt() const
+{
+#if defined(JSON_NO_INT64)
+   return asInt();
+#else
+   return asInt64();
+#endif
+}
+
+
+LargestUInt
+Value::asLargestUInt() const
+{
+#if defined(JSON_NO_INT64)
+   return asUInt();
+#else
+   return asUInt64();
+#endif
+}
+
+
+double
+Value::asDouble() const
+{
+   switch ( type_ )
+   {
+   case nullValue:
+      return 0.0;
+   case intValue:
+      return static_cast<double>( value_.int_ );
+   case uintValue:
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+      return static_cast<double>( value_.uint_ );
+#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+      return static_cast<double>( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1);
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+   case realValue:
+      return value_.real_;
+   case booleanValue:
+      return value_.bool_ ? 1.0 : 0.0;
+   case stringValue:
+   case arrayValue:
+   case objectValue:
+      JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" );
+   default:
+      JSON_ASSERT_UNREACHABLE;
+   }
+   return 0; // unreachable;
+}
+
+float
+Value::asFloat() const
+{
+   switch ( type_ )
+   {
+   case nullValue:
+      return 0.0f;
+   case intValue:
+      return static_cast<float>( value_.int_ );
+   case uintValue:
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+      return static_cast<float>( value_.uint_ );
+#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+      return static_cast<float>( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1);
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+   case realValue:
+      return static_cast<float>( value_.real_ );
+   case booleanValue:
+      return value_.bool_ ? 
1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex 
newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( 
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..8c4c180 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+         // blep notes: actually escaping \/ may be useful in javascript to avoid </
+         // sequence.
+         // Should add a flag to allow this compatibility mode and prevent this
+         // sequence from occurring.
+         default:
+            if ( isControlCharacter( *c ) )
+            {
+               std::ostringstream oss;
+               oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c);
+               result += oss.str();
+            }
+            else
+            {
+               result += *c;
+            }
+            break;
+      }
+   }
+   result += "\"";
+   return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer()
+{
+}
+
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+   : yamlCompatiblityEnabled_( false )
+{
+}
+
+
+void
+FastWriter::enableYAMLCompatibility()
+{
+   yamlCompatiblityEnabled_ = true;
+}
+
+
+std::string
+FastWriter::write( const Value &root )
+{
+   document_ = "";
+   writeValue( root );
+   document_ += "\n";
+   return document_;
+}
+
+
+void
+FastWriter::writeValue( const Value &value )
+{
+   switch ( value.type() )
+   {
+   case nullValue:
+      document_ += "null";
+      break;
+   case intValue:
+      document_ += valueToString( value.asLargestInt() );
+      break;
+   case uintValue:
+      document_ += valueToString( value.asLargestUInt() );
+      break;
+   case realValue:
+      document_ += valueToString( value.asDouble() );
+      break;
+   case stringValue:
+      document_ += valueToQuotedString( value.asCString() );
+      break;
+   case booleanValue:
+      document_ += valueToString( value.asBool() );
+      break;
+   case arrayValue:
+      {
+         document_ += "[";
+         int size = value.size();
+         for ( int index =0; index < size; ++index )
+         {
+            if ( index > 0 )
+               document_ += ",";
+            writeValue( value[index] );
+         }
+         document_ += "]";
+      }
+      break;
+   case objectValue:
+      {
+         Value::Members members( value.getMemberNames() );
+         document_ += "{";
+         for ( Value::Members::iterator it = members.begin();
+               it != members.end();
+               ++it )
+         {
+            const std::string &name = *it;
+            if ( it != members.begin() )
+               document_ += ",";
+            document_ += valueToQuotedString( name.c_str() );
+            document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + 
lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investigate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases will be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..0d07238 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h @@ -0,0 +1,259 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include <json/config.h> +# include <stdio.h> +# include <deque> +# include <string> + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion fails. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent stepping into it when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext().
+ PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. + TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targeted at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque<Failure> Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs tests as specified on the command line. + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases. + /// If --test is provided, then run only the specified test case.
+ int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test cases in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque<TestCaseFactory> Factories; + Factories tests_; + }; + + template<typename T> + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equal. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equal. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case.
+#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overridden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..de64200 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp @@ -0,0 +1,271 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include <json/json.h> +#include "jsontest.h" + + +// TODO: +// - boolean values report that they are integral. They should not. +// - unsigned integers in the signed integer range are not considered to be valid integers. Should check the range. + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , float_( 0.00390625f ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default.
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/test/cleantests.py b/tags/jsoncpp/test-0.6.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 
+.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected new file mode 100644 index 
0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json new file mode 100644 index 
0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json 
b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 
+.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 
+.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 
+.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 
+.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 
+.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 
+.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 
+.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 
+.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 
+.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 
+.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json 
b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- 
/dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/generate_expected.py b/tags/jsoncpp/test-0.6.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json 
b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ 
No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": 
"\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/test-0.6.0/test/runjsontests.py b/tags/jsoncpp/test-0.6.0/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '<File "%s" is missing: %s>' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) +
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/test/rununittests.py b/tags/jsoncpp/test-0.6.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' 
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/version b/tags/jsoncpp/test-0.6.0/version new file mode 100644 index 0000000..718dcfc --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/version @@ -0,0 +1 @@ +test-0.6.0 \ No newline at end of file From cdd2c9c8ef09e71e6ec7ab761254b8d9b0cfa508 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 20:53:12 +0000 Subject: [PATCH 171/268] Removing tag due to failed testing git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@171 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/test-0.6.0/AUTHORS | 1 - tags/jsoncpp/test-0.6.0/LICENSE | 55 - tags/jsoncpp/test-0.6.0/NEWS.txt | 95 - tags/jsoncpp/test-0.6.0/README.txt | 172 -- tags/jsoncpp/test-0.6.0/SConstruct | 248 -- tags/jsoncpp/test-0.6.0/devtools/__init__.py | 1 - tags/jsoncpp/test-0.6.0/devtools/antglob.py | 201 -- tags/jsoncpp/test-0.6.0/devtools/fixeol.py | 63 - .../test-0.6.0/devtools/licenseupdater.py | 93 - tags/jsoncpp/test-0.6.0/devtools/tarball.py | 53 - tags/jsoncpp/test-0.6.0/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/test-0.6.0/doc/footer.html | 23 - tags/jsoncpp/test-0.6.0/doc/header.html | 24 - tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox | 126 - tags/jsoncpp/test-0.6.0/doc/readme.txt | 1 - tags/jsoncpp/test-0.6.0/doc/roadmap.dox | 35 - tags/jsoncpp/test-0.6.0/doxybuild.py | 169 -- .../test-0.6.0/include/json/autolink.h | 24 - tags/jsoncpp/test-0.6.0/include/json/config.h | 96 - .../test-0.6.0/include/json/features.h | 49 - .../test-0.6.0/include/json/forwards.h | 44 - tags/jsoncpp/test-0.6.0/include/json/json.h | 15 - tags/jsoncpp/test-0.6.0/include/json/reader.h | 214 -- tags/jsoncpp/test-0.6.0/include/json/value.h | 1103 --------- tags/jsoncpp/test-0.6.0/include/json/writer.h | 185 -- .../test-0.6.0/makefiles/vs71/jsoncpp.sln | 46 - .../test-0.6.0/makefiles/vs71/jsontest.vcproj | 119 - .../test-0.6.0/makefiles/vs71/lib_json.vcproj | 214 -- .../makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/test-0.6.0/makerelease.py | 380 --- .../test-0.6.0/scons-tools/globtool.py | 53 - .../jsoncpp/test-0.6.0/scons-tools/srcdist.py | 179 -- .../test-0.6.0/scons-tools/substinfile.py | 79 - tags/jsoncpp/test-0.6.0/scons-tools/targz.py | 82 - .../test-0.6.0/src/jsontestrunner/main.cpp | 269 --- .../test-0.6.0/src/jsontestrunner/sconscript | 9 - .../src/lib_json/json_batchallocator.h | 130 - .../src/lib_json/json_internalarray.inl | 456 ---- .../src/lib_json/json_internalmap.inl | 615 ----- .../test-0.6.0/src/lib_json/json_reader.cpp | 880 ------- .../test-0.6.0/src/lib_json/json_tool.h | 93 - .../test-0.6.0/src/lib_json/json_value.cpp | 1847 -------------- .../src/lib_json/json_valueiterator.inl | 299 --- .../test-0.6.0/src/lib_json/json_writer.cpp | 838 ------- .../test-0.6.0/src/lib_json/sconscript | 8 - .../test-0.6.0/src/test_lib_json/jsontest.cpp | 608 ----- .../test-0.6.0/src/test_lib_json/jsontest.h | 259 -- .../test-0.6.0/src/test_lib_json/main.cpp | 271 --- .../test-0.6.0/src/test_lib_json/sconscript | 10 - tags/jsoncpp/test-0.6.0/test/cleantests.py | 10 - .../test/data/fail_test_array_01.json | 1 - .../test/data/test_array_01.expected | 1 - .../test-0.6.0/test/data/test_array_01.json | 1 - .../test/data/test_array_02.expected | 2 - .../test-0.6.0/test/data/test_array_02.json | 1 - .../test/data/test_array_03.expected | 6 - .../test-0.6.0/test/data/test_array_03.json | 1 - .../test/data/test_array_04.expected | 5 - 
.../test-0.6.0/test/data/test_array_04.json | 1 - .../test/data/test_array_05.expected | 100 - .../test-0.6.0/test/data/test_array_05.json | 1 - .../test/data/test_array_06.expected | 5 - .../test-0.6.0/test/data/test_array_06.json | 4 - .../test/data/test_basic_01.expected | 1 - .../test-0.6.0/test/data/test_basic_01.json | 1 - .../test/data/test_basic_02.expected | 1 - .../test-0.6.0/test/data/test_basic_02.json | 1 - .../test/data/test_basic_03.expected | 3 - .../test-0.6.0/test/data/test_basic_03.json | 3 - .../test/data/test_basic_04.expected | 2 - .../test-0.6.0/test/data/test_basic_04.json | 2 - .../test/data/test_basic_05.expected | 2 - .../test-0.6.0/test/data/test_basic_05.json | 2 - .../test/data/test_basic_06.expected | 2 - .../test-0.6.0/test/data/test_basic_06.json | 2 - .../test/data/test_basic_07.expected | 2 - .../test-0.6.0/test/data/test_basic_07.json | 2 - .../test/data/test_basic_08.expected | 2 - .../test-0.6.0/test/data/test_basic_08.json | 3 - .../test/data/test_basic_09.expected | 2 - .../test-0.6.0/test/data/test_basic_09.json | 4 - .../test/data/test_comment_01.expected | 8 - .../test-0.6.0/test/data/test_comment_01.json | 8 - .../test/data/test_complex_01.expected | 20 - .../test-0.6.0/test/data/test_complex_01.json | 17 - .../test/data/test_integer_01.expected | 1 - .../test-0.6.0/test/data/test_integer_01.json | 2 - .../test/data/test_integer_02.expected | 1 - .../test-0.6.0/test/data/test_integer_02.json | 2 - .../test/data/test_integer_03.expected | 1 - .../test-0.6.0/test/data/test_integer_03.json | 2 - .../test/data/test_integer_04.expected | 2 - .../test-0.6.0/test/data/test_integer_04.json | 3 - .../test/data/test_integer_05.expected | 2 - .../test-0.6.0/test/data/test_integer_05.json | 2 - .../test/data/test_integer_06_64bits.expected | 1 - .../test/data/test_integer_06_64bits.json | 2 - .../test/data/test_integer_07_64bits.expected | 1 - .../test/data/test_integer_07_64bits.json | 2 - .../test/data/test_integer_08_64bits.expected | 1 - .../test/data/test_integer_08_64bits.json | 2 - .../test/data/test_large_01.expected | 2122 ----------------- .../test-0.6.0/test/data/test_large_01.json | 2 - .../test/data/test_object_01.expected | 1 - .../test-0.6.0/test/data/test_object_01.json | 1 - .../test/data/test_object_02.expected | 2 - .../test-0.6.0/test/data/test_object_02.json | 1 - .../test/data/test_object_03.expected | 4 - .../test-0.6.0/test/data/test_object_03.json | 5 - .../test/data/test_object_04.expected | 2 - .../test-0.6.0/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../test/data/test_real_01.expected | 2 - .../test-0.6.0/test/data/test_real_01.json | 3 - .../test/data/test_real_02.expected | 2 - .../test-0.6.0/test/data/test_real_02.json | 3 - .../test/data/test_real_03.expected | 2 - .../test-0.6.0/test/data/test_real_03.json | 3 - .../test/data/test_real_04.expected | 2 - .../test-0.6.0/test/data/test_real_04.json | 3 - .../test/data/test_real_05.expected | 3 - .../test-0.6.0/test/data/test_real_05.json | 3 - .../test/data/test_real_06.expected | 3 - .../test-0.6.0/test/data/test_real_06.json | 3 - .../test/data/test_real_07.expected | 3 - .../test-0.6.0/test/data/test_real_07.json | 3 - .../test/data/test_string_01.expected | 1 - .../test-0.6.0/test/data/test_string_01.json | 1 - .../test/data/test_string_02.expected | 1 - .../test-0.6.0/test/data/test_string_02.json | 1 - .../test/data/test_string_03.expected | 1 - 
.../test-0.6.0/test/data/test_string_03.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - .../test-0.6.0/test/generate_expected.py | 11 - .../test-0.6.0/test/jsonchecker/fail1.json | 1 - .../test-0.6.0/test/jsonchecker/fail10.json | 1 - .../test-0.6.0/test/jsonchecker/fail11.json | 1 - .../test-0.6.0/test/jsonchecker/fail12.json | 1 - .../test-0.6.0/test/jsonchecker/fail13.json | 1 - .../test-0.6.0/test/jsonchecker/fail14.json | 1 - .../test-0.6.0/test/jsonchecker/fail15.json | 1 - .../test-0.6.0/test/jsonchecker/fail16.json | 1 - .../test-0.6.0/test/jsonchecker/fail17.json | 1 - .../test-0.6.0/test/jsonchecker/fail18.json | 1 - .../test-0.6.0/test/jsonchecker/fail19.json | 1 - .../test-0.6.0/test/jsonchecker/fail2.json | 1 - .../test-0.6.0/test/jsonchecker/fail20.json | 1 - .../test-0.6.0/test/jsonchecker/fail21.json | 1 - .../test-0.6.0/test/jsonchecker/fail22.json | 1 - .../test-0.6.0/test/jsonchecker/fail23.json | 1 - .../test-0.6.0/test/jsonchecker/fail24.json | 1 - .../test-0.6.0/test/jsonchecker/fail25.json | 1 - .../test-0.6.0/test/jsonchecker/fail26.json | 1 - .../test-0.6.0/test/jsonchecker/fail27.json | 2 - .../test-0.6.0/test/jsonchecker/fail28.json | 2 - .../test-0.6.0/test/jsonchecker/fail29.json | 1 - .../test-0.6.0/test/jsonchecker/fail3.json | 1 - .../test-0.6.0/test/jsonchecker/fail30.json | 1 - .../test-0.6.0/test/jsonchecker/fail31.json | 1 - .../test-0.6.0/test/jsonchecker/fail32.json | 1 - .../test-0.6.0/test/jsonchecker/fail33.json | 1 - .../test-0.6.0/test/jsonchecker/fail4.json | 1 - .../test-0.6.0/test/jsonchecker/fail5.json | 1 - .../test-0.6.0/test/jsonchecker/fail6.json | 1 - .../test-0.6.0/test/jsonchecker/fail7.json | 1 - .../test-0.6.0/test/jsonchecker/fail8.json | 1 - .../test-0.6.0/test/jsonchecker/fail9.json | 1 - .../test-0.6.0/test/jsonchecker/pass1.json | 58 - .../test-0.6.0/test/jsonchecker/pass2.json | 1 - .../test-0.6.0/test/jsonchecker/pass3.json | 6 - .../test-0.6.0/test/jsonchecker/readme.txt | 3 - .../test-0.6.0/test/pyjsontestrunner.py | 64 - tags/jsoncpp/test-0.6.0/test/runjsontests.py | 134 -- tags/jsoncpp/test-0.6.0/test/rununittests.py | 73 - tags/jsoncpp/test-0.6.0/version | 1 - 185 files changed, 15355 deletions(-) delete mode 100644 tags/jsoncpp/test-0.6.0/AUTHORS delete mode 100644 tags/jsoncpp/test-0.6.0/LICENSE delete mode 100644 tags/jsoncpp/test-0.6.0/NEWS.txt delete mode 100644 tags/jsoncpp/test-0.6.0/README.txt delete mode 100644 tags/jsoncpp/test-0.6.0/SConstruct delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/__init__.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/antglob.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/fixeol.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py delete mode 100644 tags/jsoncpp/test-0.6.0/devtools/tarball.py delete mode 100644 tags/jsoncpp/test-0.6.0/doc/doxyfile.in delete mode 100644 tags/jsoncpp/test-0.6.0/doc/footer.html delete mode 100644 tags/jsoncpp/test-0.6.0/doc/header.html delete mode 100644 tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/test-0.6.0/doc/readme.txt delete 
mode 100644 tags/jsoncpp/test-0.6.0/doc/roadmap.dox delete mode 100644 tags/jsoncpp/test-0.6.0/doxybuild.py delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/autolink.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/config.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/features.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/forwards.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/json.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/reader.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/value.h delete mode 100644 tags/jsoncpp/test-0.6.0/include/json/writer.h delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/test-0.6.0/makerelease.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/targz.py delete mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/test-0.6.0/test/cleantests.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json delete mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.json delete mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/generate_expected.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json delete mode 100644 
tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/runjsontests.py delete mode 100644 tags/jsoncpp/test-0.6.0/test/rununittests.py delete mode 100644 tags/jsoncpp/test-0.6.0/version diff --git a/tags/jsoncpp/test-0.6.0/AUTHORS b/tags/jsoncpp/test-0.6.0/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/test-0.6.0/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/test-0.6.0/LICENSE b/tags/jsoncpp/test-0.6.0/LICENSE deleted file mode 100644 index ca2bfe1..0000000 --- a/tags/jsoncpp/test-0.6.0/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -The JsonCpp library's source code, including accompanying documentation, -tests and demonstration applications, are licensed under the following -conditions... - -The author (Baptiste Lepilleur) explicitly disclaims copyright in all -jurisdictions which recognize such a disclaimer. In such jurisdictions, -this software is released into the Public Domain. - -In jurisdictions which do not recognize Public Domain property (e.g. Germany as of -2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is -released under the terms of the MIT License (see below). - -In jurisdictions which recognize Public Domain property, the user of this -software may choose to accept it either as 1) Public Domain, 2) under the -conditions of the MIT License (see below), or 3) under the terms of dual -Public Domain/MIT License conditions described here, as they choose. 
- -The MIT License is about as close to Public Domain as a license can get, and is -described in clear, concise terms at: - - http://en.wikipedia.org/wiki/MIT_License - -The full text of the MIT License follows: - -======================================================================== -Copyright (c) 2007-2010 Baptiste Lepilleur - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -======================================================================== -(END LICENSE TEXT) - -The MIT license is compatible with both the GPL and commercial -software, affording one all of the rights of Public Domain with the -minor nuisance of being required to keep the above copyright notice -and license text in the source code. Note also that by accepting the -Public Domain "license" you can re-license your copy using whatever -license you like. diff --git a/tags/jsoncpp/test-0.6.0/NEWS.txt b/tags/jsoncpp/test-0.6.0/NEWS.txt deleted file mode 100644 index 7978c0a..0000000 --- a/tags/jsoncpp/test-0.6.0/NEWS.txt +++ /dev/null @@ -1,95 +0,0 @@ - New in JsonCpp 0.6.0: - --------------------- - -* Compilation - - - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now - propagated to the build environment as this is required for some - compiler installation. - - - Added support for Microsoft Visual Studio 2008 (bug #2930462): - The platform "msvc90" has been added. - - Notes: you need to setup the environment by running vcvars32.bat - (e.g. MSVC 2008 command prompt in start menu) before running scons. - - - Added support for amalgated source and header generation (a la sqlite). - Refer to README.txt section "Generating amalgated source and header" - for detail. - -* Value - - - Removed experimental ValueAllocator, it caused static - initialization/destruction order issues (bug #2934500). - The DefaultValueAllocator has been inlined in code. - - - Added support for 64 bits integer: - - Types Json::Int64 and Json::UInt64 have been added. They are aliased - to 64 bits integers on system that support them (based on __int64 on - Microsoft Visual Studio platform, and long long on other platforms). - - Types Json::LargestInt and Json::LargestUInt have been added. They are - aliased to the largest integer type supported: - either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. - - Json::Value::asInt() and Json::Value::asUInt() still returns plain - "int" based types, but asserts if an attempt is made to retrieve - a 64 bits value that can not represented as the return type. 
- - Json::Value::asInt64() and Json::Value::asUInt64() have been added - to obtain the 64 bits integer value. - - Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns - the integer as a LargestInt/LargestUInt respectively. Those functions - functions are typically used when implementing writer. - - The reader attempts to read number as 64 bits integer, and fall back - to reading a double if the number is not in the range of 64 bits - integer. - - Warning: Json::Value::asInt() and Json::Value::asUInt() now returns - long long. This changes break code that was passing the return value - to *printf() function. - - Support for 64 bits integer can be disabled by defining the macro - JSON_NO_INT64 (uncomment it in json/config.h for example), though - it should have no impact on existing usage. - - - The type Json::ArrayIndex is used for indexes of a JSON value array. It - is an unsigned int (typically 32 bits). - - - Array index can be passed as int to operator[], allowing use of literal: - Json::Value array; - array.append( 1234 ); - int value = array[0].asInt(); // did not compile previously - - - Added float Json::Value::asFloat() to obtain a floating point value as a - float (avoid lost of precision warning caused by used of asDouble() - to initialize a float). - -* Reader - - - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. - Bug #3023708 (Formatted has 2 't'). The old member function is deprecated - but still present for backward compatibility. - -* Tests - - - Added test to ensure that the escape sequence "\/" is corrected handled - by the parser. - -* Bug fixes - - - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now - correctly detected. - - - Bug #3139678: stack buffer overflow when parsing a double with a - length of 32 characters. - -* License - - - See file LICENSE for details. Basically JsonCpp is now licensed under - MIT license, or public domain if desired and recognized in your jurisdiction. - Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who - helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/test-0.6.0/README.txt b/tags/jsoncpp/test-0.6.0/README.txt deleted file mode 100644 index ba70329..0000000 --- a/tags/jsoncpp/test-0.6.0/README.txt +++ /dev/null @@ -1,172 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate -JSON value, handle serialization and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. 
- -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - msvc90 Microsoft Visual Studio 2008 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -Notes: if you are building with Microsoft Visual Studio 2008, you need to -setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) -before running scons. - -Adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -Notes that test can be run by scons using the 'check' target (see above). - -You need to run test manually only if you are troubleshooting an issue. - -In the instruction below, replace "path to jsontest.exe" with the path -of the 'jsontest' executable that was compiled on your platform. - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - -Notes that the documentation is also available for download as a tarball. -The documentation of the latest release is available online at: -http://jsoncpp.sourceforge.net/ - -* Generating amalgated source and header - ====================================== - -JsonCpp is provided with a script to generate a single header and a single -source file to ease inclusion in an existing project. - -The amalgated source can be generated at any time by running the following -command from the top-directory (requires python 2.6): - -python amalgate.py - -It is possible to specify header name. See -h options for detail. By default, -the following files are generated: -- dist/jsoncpp.cpp: source file that need to be added to your project -- dist/json/json.h: header file corresponding to use in your project. It is -equivalent to including json/json.h in non-amalgated source. This header -only depends on standard headers. -- dist/json/json-forwards.h: header the provides forward declaration -of all JsonCpp types. This typically what should be included in headers to -speed-up compilation. - -The amalgated sources are generated by concatenating JsonCpp source in the -correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of -other headers. - -* Using json-cpp in your project: - =============================== - -include/ should be added to your compiler include path. 
jsoncpp headers -should be included as follow: - -#include - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. - -* License - ======= - -See file LICENSE for details. Basically JsonCpp is licensed under -MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/test-0.6.0/SConstruct b/tags/jsoncpp/test-0.6.0/SConstruct deleted file mode 100644 index 23225cb..0000000 --- a/tags/jsoncpp/test-0.6.0/SConstruct +++ /dev/null @@ -1,248 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. 
- import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - # LD_LIBRARY_PATH & co is required on some system for the compiler - vars = {} - for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'msvc90': - env['MSVS_VERSION']='9.0' - # Scons 1.2 fails to detect the correct location of the platform SDK. - # So we propagate those from the environment. This requires that the - # user run vcvars32.bat before compiling. 
- if 'INCLUDE' in os.environ: - env['ENV']['INCLUDE'] = os.environ['INCLUDE'] - if 'LIB' in os.environ: - env['ENV']['LIB'] = os.environ['LIB'] - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - 
target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/test-0.6.0/devtools/__init__.py b/tags/jsoncpp/test-0.6.0/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/devtools/antglob.py b/tags/jsoncpp/test-0.6.0/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. 
- Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/test-0.6.0/devtools/fixeol.py b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript 
**/wscript_build', -## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py deleted file mode 100644 index 03e0467..0000000 --- a/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Updates the license text in source file. -""" - -# An existing license is found if the file starts with the string below, -# and ends with the first blank line. -LICENSE_BEGIN = "// Copyright " - -BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -""".replace('\r\n','\n') - -def update_license( path, dry_run, show_diff ): - """Update the license statement in the specified file. - Parameters: - path: path of the C++ source file to update. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - with open( path, 'rt' ) as fin: - original_text = fin.read().replace('\r\n','\n') - newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith( LICENSE_BEGIN ): - # No existing license found => prepend it - new_text = BRIEF_LICENSE + original_text - else: - license_end_index = original_text.index( '\n\n' ) # search first blank line - new_text = BRIEF_LICENSE + original_text[license_end_index+2:] - if original_text != new_text: - if not dry_run: - with open( path, 'wb' ) as fout: - fout.write( new_text.replace('\n', newline ) ) - print 'Updated', path - if show_diff: - import difflib - print '\n'.join( difflib.unified_diff( original_text.split('\n'), - new_text.split('\n') ) ) - return True - return False - -def update_license_in_source_directories( source_dirs, dry_run, show_diff ): - """Updates license text in C++ source files found in directory source_dirs. - Parameters: - source_dirs: list of directory to scan for C++ sources. Directories are - scanned recursively. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - from devtools import antglob - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - for source_dir in source_dirs: - cpp_sources = antglob.glob( source_dir, - includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs ) - for source in cpp_sources: - update_license( source, dry_run, show_diff ) - -def main(): - usage = """%prog DIR [DIR2...] 
-
-Updates license text in sources of the project in source files found
-in the directory specified on the command-line.
-
-Example of call:
-python devtools\licenseupdater.py include src -n --diff
-=> Show change that would be made to the sources.
-
-python devtools\licenseupdater.py include src
-=> Update license statement on all sources in directories include/ and src/.
-"""
-    from optparse import OptionParser
-    parser = OptionParser(usage=usage)
-    parser.allow_interspersed_args = False
-    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
-        help="""Only show what files are updated, do not update the files""")
-    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
-        help="""On update, show change made to the file.""")
-    parser.enable_interspersed_args()
-    options, args = parser.parse_args()
-    update_license_in_source_directories( args, options.dry_run, options.show_diff )
-    print 'Done'
-
-if __name__ == '__main__':
-    import sys
-    import os.path
-    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-    main()
-
diff --git a/tags/jsoncpp/test-0.6.0/devtools/tarball.py b/tags/jsoncpp/test-0.6.0/devtools/tarball.py
deleted file mode 100644
index 182602e..0000000
--- a/tags/jsoncpp/test-0.6.0/devtools/tarball.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import os.path
-import gzip
-import tarfile
-
-TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
-
-def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
-    """Parameters:
-    tarball_path: output path of the .tar.gz file
-    sources: list of sources to include in the tarball, relative to the current directory
-    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
-        from path in the tarball.
-    prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
-        to make them child of root.
-    """
-    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
-    def archive_name( path ):
-        """Makes path relative to base_dir."""
-        path = os.path.normpath( os.path.abspath( path ) )
-        common_path = os.path.commonprefix( (base_dir, path) )
-        archive_name = path[len(common_path):]
-        if os.path.isabs( archive_name ):
-            archive_name = archive_name[1:]
-        return os.path.join( prefix_dir, archive_name )
-    def visit(tar, dirname, names):
-        for name in names:
-            path = os.path.join(dirname, name)
-            if os.path.isfile(path):
-                path_in_tar = archive_name(path)
-                tar.add(path, path_in_tar )
-    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
-    tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
-    try:
-        for source in sources:
-            source_path = source
-            if os.path.isdir( source ):
-                os.path.walk(source_path, visit, tar)
-            else:
-                path_in_tar = archive_name(source_path)
-                tar.add(source_path, path_in_tar ) # filename, arcname
-    finally:
-        tar.close()
-
-def decompress( tarball_path, base_dir ):
-    """Decompress the gzipped tarball into directory base_dir.
-    """
-    # !!! This class method is not documented in the online doc
-    # nor is bz2open!
-    tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
-    try:
-        tar.extractall( base_dir )
-    finally:
-        tar.close()
diff --git a/tags/jsoncpp/test-0.6.0/doc/doxyfile.in b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in
deleted file mode 100644
index 48861d2..0000000
--- a/tags/jsoncpp/test-0.6.0/doc/doxyfile.in
+++ /dev/null
@@ -1,1534 +0,0 @@
-# Doxyfile 1.5.9
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project
-#
-# All text after a hash (#) is considered a comment and will be ignored
-# The format is:
-# TAG = value [value, ...]
-# For lists items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (" ")
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file
-# that follow. The default is UTF-8 which is also the encoding used for all
-# text before the first occurrence of this tag. Doxygen uses libiconv (or the
-# iconv built into libc) for the transcoding. See
-# http://www.gnu.org/software/libiconv for the list of possible encodings.
-
-DOXYFILE_ENCODING = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
-# by quotes) that should identify the project.
-
-PROJECT_NAME = "JsonCpp"
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number.
-# This could be handy for archiving the generated documentation or
-# if some version control system is used.
-
-PROJECT_NUMBER = %JSONCPP_VERSION%
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
-# base path where the generated documentation will be put.
-# If a relative path is entered, it will be relative to the location
-# where doxygen was started. If left blank the current directory will be used.
-
-OUTPUT_DIRECTORY = %DOC_TOPDIR%
-
-# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
-# 4096 sub-directories (in 2 levels) under the output directory of each output
-# format and will distribute the generated files over these directories.
-# Enabling this option can be useful when feeding doxygen a huge amount of
-# source files, where putting all generated files in the same directory would
-# otherwise cause performance problems for the file system.
-
-CREATE_SUBDIRS = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# The default language is English, other supported languages are:
-# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
-# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German,
-# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English
-# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian,
-# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak,
-# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese.
-
-OUTPUT_LANGUAGE = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
-# include brief member descriptions after the members that are listed in
-# the file and class documentation (similar to JavaDoc).
-# Set to NO to disable this.
- -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. 
If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. 
The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. 
This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. 
- -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. 
The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). 
- -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. 
- -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. 
- -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. 
- -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. 
Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
- -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. 
- -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. 
- -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). 
- -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. 
- -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. 
Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/test-0.6.0/doc/footer.html b/tags/jsoncpp/test-0.6.0/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/test-0.6.0/doc/header.html b/tags/jsoncpp/test-0.6.0/doc/header.html deleted file mode 100644 index 1a6ad61..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox b/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox deleted file mode 100644 index 97cc108..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox +++ /dev/null @@ -1,126 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space": true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- attach C and C++ style comments to element during parsing -- rewrite JSON document preserving original comments - -Notes: Comments used to be supported in JSON but where removed for -portability (C like comments are not supported in Python). Since -comments are useful in configuration/input file, this feature was -preserved. - -\section _example Code example - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormattedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _pbuild Build instructions -The build instructions are located in the file -README.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _pdownload Download -The sources can be downloaded from -SourceForge download page. - -The latest version of the source is available in the project's subversion repository: - -http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ - -To checkout the source, see the following -instructions. - -\section _news What's New? 
-The description of latest changes can be found in -NEWS.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest NEWS.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -See file LICENSE in the top-directory of the project. - -Basically JsonCpp is licensed under MIT license, or public domain if desired -and recognized in your jurisdiction. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/test-0.6.0/doc/readme.txt b/tags/jsoncpp/test-0.6.0/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/test-0.6.0/doc/roadmap.dox b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox deleted file mode 100644 index c7f14d5..0000000 --- a/tags/jsoncpp/test-0.6.0/doc/roadmap.dox +++ /dev/null @@ -1,35 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_writer Writter control - Provides more control to determine how specific items are serialized when JSON allow choice: - - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". - - Optionally allow escaping of "/" using "\/". - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - Stream based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/test-0.6.0/doxybuild.py b/tags/jsoncpp/test-0.6.0/doxybuild.py deleted file mode 100644 index 03ad68d..0000000 --- a/tags/jsoncpp/test-0.6.0/doxybuild.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Script to generate doxygen documentation. 
-""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'LICENSE', - 'NEWS.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/include/json/autolink.h b/tags/jsoncpp/test-0.6.0/include/json/autolink.h deleted file mode 100644 index 02328d1..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/autolink.h +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/config.h b/tags/jsoncpp/test-0.6.0/include/json/config.h deleted file mode 100644 index 24991d5..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/config.h +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
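Editor's note: as the comment above (and the define that follows) describe, JSON_USE_EXCEPTION makes invalid type manipulation throw instead of hitting a C assert. A hedged usage sketch; the header does not name the thrown type here, so the sketch assumes only that it derives from std::exception, and the "port" key is invented for the example:

\code
#include <json/json.h>
#include <exception>
#include <iostream>

// With JSON_USE_EXCEPTION defined, asking a value for an incompatible type
// (e.g. asInt() on a string that is not convertible) reports the error by
// throwing rather than asserting.
void printPort( const Json::Value &config )
{
   try
   {
      std::cout << "port: " << config["port"].asInt() << std::endl;
   }
   catch ( const std::exception &e )   // assumption: the thrown type derives from std::exception
   {
      std::cerr << "invalid 'port' entry: " << e.what() << std::endl;
   }
}
\endcode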
-# define JSON_USE_EXCEPTION 1 - -/// If defined, indicates that the source file is amalgated -/// to prevent private header inclusion. -/// Remarks: it is automatically defined in the generated amalgated header. -// #define JSON_IS_AMALGATED - - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer -// Storages, and 64 bits integer support is disabled. -// #define JSON_NO_INT64 1 - -#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 -// Microsoft Visual Studio 6 only support conversion from __int64 to double -// (no conversion from unsigned __int64). -#define JSON_USE_INT64_DOUBLE_CONVERSION 1 -#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 - -#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 -/// Indicates that the following function is deprecated. -# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) -#endif - -#if !defined(JSONCPP_DEPRECATED) -# define JSONCPP_DEPRECATED(message) -#endif // if !defined(JSONCPP_DEPRECATED) - -namespace Json { - typedef int Int; - typedef unsigned int UInt; -# if defined(JSON_NO_INT64) - typedef int LargestInt; - typedef unsigned int LargestUInt; -# undef JSON_HAS_INT64 -# else // if defined(JSON_NO_INT64) - // For Microsoft Visual use specific types as long long is not supported -# if defined(_MSC_VER) // Microsoft Visual Studio - typedef __int64 Int64; - typedef unsigned __int64 UInt64; -# else // if defined(_MSC_VER) // Other platforms, use long long - typedef long long int Int64; - typedef unsigned long long int UInt64; -# endif // if defined(_MSC_VER) - typedef Int64 LargestInt; - typedef UInt64 LargestUInt; -# define JSON_HAS_INT64 -# endif // if defined(JSON_NO_INT64) -} // end namespace Json - - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/features.h b/tags/jsoncpp/test-0.6.0/include/json/features.h deleted file mode 100644 index 0b53db1..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/features.h +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. 
- * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/forwards.h b/tags/jsoncpp/test-0.6.0/include/json/forwards.h deleted file mode 100644 index 083d44f..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/forwards.h +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "config.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef unsigned int ArrayIndex; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/json.h b/tags/jsoncpp/test-0.6.0/include/json/json.h deleted file mode 100644 index da5fc96..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/json.h +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/reader.h b/tags/jsoncpp/test-0.6.0/include/json/reader.h deleted file mode 100644 index 5e4c32a..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/reader.h +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "features.h" -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. 
- * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. - * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. - \ Must be >= beginDoc. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. - * \deprecated Use getFormattedErrorMessages() instead (typo fix). - */ - JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") - std::string getFormatedErrorMessages() const; - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
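Editor's note: the two-pointer parse() overload documented above is handy when the document lives in a raw buffer rather than a std::string, and combining it with Features::strictMode() gives a standard-conforming parse. A small sketch under those assumptions (function name and buffer contents are illustrative):

\code
#include <json/json.h>
#include <iostream>

bool parseBuffer( const char *begin, const char *end )
{
   Json::Reader reader( Json::Features::strictMode() );  // comments forbidden, root must be array or object
   Json::Value root;
   if ( !reader.parse( begin, end, root ) )              // collectComments defaults to true, but is
   {                                                     // ignored when comments are disallowed
      std::cerr << reader.getFormattedErrorMessages();   // lists each error with its location
      return false;
   }
   return true;
}
\endcode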
- */ - std::string getFormattedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/value.h b/tags/jsoncpp/test-0.6.0/include/json/value.h deleted file mode 100644 index 66821ab..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/value.h +++ /dev/null @@ -1,1103 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. - */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
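Editor's note: a concrete illustration of the object access described above, i.e. operator[]() creating members on demand, get() returning a default, and getMemberNames() listing keys. The key names are invented for the example:

\code
#include <json/json.h>
#include <iostream>
#include <string>

void describeConfig()
{
   Json::Value config( Json::objectValue );
   config["encoding"] = "UTF-8";                 // operator[] creates the member on first access
   config["indent"]["length"] = 3;               // nested objects are created on demand

   // get() returns the default instead of inserting a null member:
   std::string enc = config.get( "encoding", "ASCII" ).asString();
   std::cout << "encoding: " << enc << std::endl;

   // getMemberNames() lists the keys of an objectValue:
   Json::Value::Members names = config.getMemberNames();
   for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
      std::cout << *it << " = " << config[*it] << std::endl;
}
\endcode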
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; -# if defined(JSON_HAS_INT64) - typedef Json::UInt64 UInt64; - typedef Json::Int64 Int64; -#endif // defined(JSON_HAS_INT64) - typedef Json::LargestInt LargestInt; - typedef Json::LargestUInt LargestUInt; - typedef Json::ArrayIndex ArrayIndex; - - static const Value null; - /// Minimum signed integer value that can be stored in a Json::Value. - static const LargestInt minLargestInt; - /// Maximum signed integer value that can be stored in a Json::Value. - static const LargestInt maxLargestInt; - /// Maximum unsigned integer value that can be stored in a Json::Value. - static const LargestUInt maxLargestUInt; - - /// Minimum signed int value that can be stored in a Json::Value. - static const Int minInt; - /// Maximum signed int value that can be stored in a Json::Value. - static const Int maxInt; - /// Maximum unsigned int value that can be stored in a Json::Value. - static const UInt maxUInt; - - /// Minimum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 minInt64; - /// Maximum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 maxInt64; - /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. - static const UInt64 maxUInt64; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( ArrayIndex index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - ArrayIndex index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - ArrayIndex index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); -#if defined(JSON_HAS_INT64) - Value( Int64 value ); - Value( UInt64 value ); -#endif // if defined(JSON_HAS_INT64) - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. 
The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. - void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - Int64 asInt64() const; - UInt64 asUInt64() const; - LargestInt asLargestInt() const; - LargestUInt asLargestUInt() const; - float asFloat() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - ArrayIndex size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( ArrayIndex size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( ArrayIndex index ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( int index ); - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( ArrayIndex index ) const; - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) 
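Editor's note: a short sketch of the array access described above, including the unsigned-literal tip from the comments. The plug-in names are invented for the example:

\code
#include <json/json.h>
#include <iostream>

void buildList()
{
   Json::Value plugins( Json::arrayValue );
   plugins.append( "python" );                  // same as plugins[plugins.size()] = "python"
   plugins.append( "ruby" );
   plugins.resize( 4 );                         // new elements are initialized to null

   // An unsigned literal selects the ArrayIndex overload instead of the
   // const char * overload, as the comment above recommends:
   std::cout << plugins[0u].asString() << std::endl;

   for ( Json::Value::ArrayIndex i = 0; i < plugins.size(); ++i )
      if ( plugins.isValidIndex( i ) && plugins[i].isString() )
         std::cout << plugins[i].asCString() << std::endl;
}
\endcode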
- const Value &operator[]( int index ) const; - - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( ArrayIndex index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( ArrayIndex index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. - * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... 
*/ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - LargestInt int_; - LargestUInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( ArrayIndex index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - ArrayIndex index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. 
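Editor's note: Path is documented above as experimental and untested, so the following is only a sketch of the documented syntax, not a recommendation to rely on it; the node names are invented for the example:

\code
#include <json/json.h>

void usePaths( Json::Value &root )
{
   Json::Path lengthPath( ".indent.length" );            // ".name1.name2" form
   Json::Value length = lengthPath.resolve( root, 3 );   // falls back to 3 if the node is missing
   lengthPath.make( root ) = 4;                          // make() creates the path and returns a reference

   // An index can be supplied as a parameter via the "[%]" form:
   Json::Path firstPlugin( ".plug-ins[%]", Json::PathArgument( 0u ) );
   Json::Value plugin = firstPlugin.resolve( root, Json::Value() );
}
\endcode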
- Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. - * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
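The look-up rule described in the comment above boils down to one divide and one modulo; the fragment below merely restates that arithmetic outside the class, for illustration only.

// kItemsPerPage mirrors the itemsPerPage enum above; being a power of two,
// the divide and modulo compile down to a shift and a mask.
static const unsigned int kItemsPerPage = 8;

inline unsigned int pageIndexOf( unsigned int itemIndex )
{
   return itemIndex / kItemsPerPage;        // which pointer in pages_
}

inline unsigned int pageOffsetOf( unsigned int itemIndex )
{
   return itemIndex % kItemsPerPage;        // slot within that page
}

// Example: item 19 is found at pages_[2][3], since 19 / 8 == 2 and 19 % 8 == 3.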
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Experimental: do not use. Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. 
- * \param indexes [input] pointer on the current index. May be \c NULL. - * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
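Since ValueIteratorBase above documents key(), index() and memberName(), here is a minimal sketch of walking an objectValue with the const iterator. It assumes the members hold string values; the printf format is only for illustration.

#include <json/json.h>
#include <cstdio>

void dumpMembers( const Json::Value &object )
{
   for ( Json::Value::const_iterator it = object.begin(); it != object.end(); ++it )
   {
      // memberName() is meaningful for objectValue; index() would be the
      // counterpart for arrayValue.
      std::printf( "%s=%s\n", it.memberName(), (*it).asString().c_str() );
   }
}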
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/writer.h b/tags/jsoncpp/test-0.6.0/include/json/writer.h deleted file mode 100644 index cb0bd9b..0000000 --- a/tags/jsoncpp/test-0.6.0/include/json/writer.h +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. 
- * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
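As a usage sketch for the two string-producing writers declared above (FastWriter for compact machine-oriented output, StyledWriter for indented output with comments), assuming an already populated root value:

#include <json/json.h>
#include <iostream>

void writeBothWays( const Json::Value &root )
{
   Json::FastWriter fast;              // whole document on a single line
   std::cout << fast.write( root );

   Json::StyledWriter styled;          // indented, comments preserved
   std::cout << styled.write( root );
}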
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - -# if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); -# endif // if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( LargestInt value ); - std::string JSON_API valueToString( LargestUInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/tags/jsoncpp/test-0.6.0/makerelease.py b/tags/jsoncpp/test-0.6.0/makerelease.py deleted file mode 100644 index a6e330e..0000000 --- a/tags/jsoncpp/test-0.6.0/makerelease.py +++ /dev/null @@ -1,380 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs. 
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev - -When testing this script: -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball -import amalgate - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned' and path != 'version': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_commit( message ): - """Commit the sandbox, providing the specified comment. - """ - svn_command( 'ci', '-m', message ) - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 2: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - next_version = args[1] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - svn_commit( 'Release ' + release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir - print 'Generating amalgated source tarball to', amalgated_tarball_path - amalgated_dir = 'dist/amalgated' - amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) - amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version - tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], - amalgated_dir, prefix_dir=amalgated_source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - 
all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Source and doc release tarballs uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - - # Set next version number and commit - set_version( next_version ) - svn_commit( 'Released ' + release_version ) - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/targz.py b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/test-0.6.0/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp deleted file mode 100644 index dfb6150..0000000 --- a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -/* This executable is used for testing parser/writer using real JSON files. - */ - - -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormattedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - - -static void -printConfig() -{ - // Print the configuration used to compile JsonCpp -#if defined(JSON_NO_INT64) - printf( "JSON_NO_INT64=1\n" ); -#else - printf( "JSON_NO_INT64=0\n" ); -#endif -} - - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( std::string(argv[1]) == "--json-config" ) - { - printConfig(); - return 3; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - try - { - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - } - catch ( const std::exception &e ) - { - printf( "Unhandled exception:\n%s\n", e.what() ); - exitCode = 1; - } - - return exitCode; -} - diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h deleted file mode 100644 index 173e2ed..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl deleted file mode 100644 index 3a532ad..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,456 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - 
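// [Editor's illustrative aside, not part of the original patch:
//  reallocateArrayPageIndex() above grows the page index geometrically via
//  newIndexCount = (indexCount*3)/2 + 1, i.e. roughly 1.5x per step, so
//  starting from 0 the capacities run 1, 2, 4, 7, 11, 17, ... and repeated
//  appends cost only O(log n) reallocations of the index.]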
virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). - } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - 
"ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl deleted file mode 100644 index f2fa160..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
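 * [Editor's illustrative aside, not part of the original source: the
 *  ValueInternalArray removed above keeps its elements in fixed-size pages and
 *  resolves element i as pages_[i / itemsPerPage][i % itemsPerPage]; with an
 *  itemsPerPage of 8, for example, index 13 lands on page 1, slot 5.]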
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp deleted file mode 100644 index 7c594e2..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,880 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
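// [Editor's note: illustrative usage sketch for the Reader implemented below,
//  not part of the original patch; error handling kept minimal on purpose:
//
//      Json::Reader reader( Json::Features::strictMode() );
//      Json::Value root;
//      if ( !reader.parse( "{ \"age\": 3 }", root ) )
//          std::cerr << reader.getFormattedErrorMessages();
//      else
//          std::cout << root["age"].asInt() << std::endl;
// ]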
-#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - -Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. 
- std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = 
false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); 
- } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ != tokenArraySeparator && - token.type_ != tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - // Attempts to parse the number as an integer. If the number is - // larger than the maximum supported value of an integer then - // we decode the number as a double. - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) - : Value::maxLargestUInt; - Value::LargestUInt threshold = maxIntegerValue / 10; - Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); - assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); - Value::LargestUInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - Value::UInt digit(c - '0'); - if ( value >= threshold ) - { - // If the current digit is not the last one, or if it is - // greater than the last digit of the maximum integer value, - // the parse the number as a double. 
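// [Editor's worked example, not part of the original patch: with 64-bit
//  integers enabled, maxLargestUInt is 18446744073709551615, so threshold is
//  1844674407370955161 and lastDigitThreshold is 5.  Scanning
//  "18446744073709551616" (2^64), the running value equals the threshold when
//  the final digit is examined; that digit (6) exceeds 5, so the token is
//  handed to decodeDouble() instead of overflowing.]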
- if ( current != token.end_ || digit > lastDigitThreshold ) - { - return decodeDouble( token ); - } - } - value = value * 10 + digit; - } - if ( isNegative ) - currentValue() = -Value::LargestInt( value ); - else if ( value <= Value::LargestUInt(Value::maxInt) ) - currentValue() = Value::LargestInt( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize+1]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && 
c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -// Deprecated. Preserved for backward compatibility -std::string -Reader::getFormatedErrorMessages() const -{ - return getFormattedErrorMessages(); -} - - -std::string -Reader::getFormattedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h deleted file mode 100644 index 658031b..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
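// [Editor's worked example, not part of the original patch: the reader above
//  decodes the escape "\uD83D\uDE00" as
//  0x10000 + ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF) = 0x1F600, and
//  codePointToUTF8() in this header then emits the UTF-8 bytes F0 9F 98 80.]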
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED -# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED - -/* This header provides common string manipulation support, such as UTF-8, - * portable conversion from/to string... - * - * It is an internal header that must not be exposed. - */ - -namespace Json { - -/// Converts a unicode code-point to UTF-8. -static inline std::string -codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -/// Returns true if ch is a control character (in range [0,32[). -static inline bool -isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - - -enum { - /// Constant that specify the size of the buffer that must be passed to uintToString. - uintToStringBufferSize = 3*sizeof(LargestUInt)+1 -}; - -// Defines a char buffer for use with uintToString(). -typedef char UIntToStringBuffer[uintToStringBufferSize]; - - -/** Converts an unsigned integer to string. - * @param value Unsigned interger to convert to string - * @param current Input/Output string buffer. - * Must have at least uintToStringBufferSize chars free. - */ -static inline void -uintToString( LargestUInt value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = char(value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -} // namespace Json { - -#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp deleted file mode 100644 index c810417..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1847 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); -const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); -const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); -const UInt64 Value::maxUInt64 = UInt64(-1); -const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); -const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); -const LargestUInt Value::maxLargestUInt = LargestUInt(-1); - - -/// Unknown size marker -enum { unknown = (unsigned)-1 }; - - -/** Duplicates the specified string value. - * @param value Pointer to the string to duplicate. Must be zero-terminated if - * length is "unknown". - * @param length Length of the value. if equals to unknown, then it will be - * computed using strlen(value). - * @return Pointer on the duplicate instance of string. - */ -static inline char * -duplicateStringValue( const char *value, - unsigned int length = unknown ) -{ - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; -} - - -/** Free the string duplicated by duplicateStringValue(). - */ -static inline void -releaseStringValue( char *value ) -{ - if ( value ) - free( value ); -} - -} // namespace Json - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
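// [Editor's worked example, not part of the original patch: the limits defined
//  above are derived from unsigned all-ones values; for a 32-bit Int,
//  UInt(-1) is 0xFFFFFFFF, so maxInt = UInt(-1)/2 = 0x7FFFFFFF = 2147483647
//  and minInt = ~(UInt(-1)/2) = 0x80000000 = -2147483648; the Int64 and
//  LargestInt variants follow the same pattern.]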
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#if !defined(JSON_IS_AMALGATED) -# ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -# endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - releaseStringValue( comment_ ); - JSON_ASSERT( text != 0 ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( ArrayIndex index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? duplicateStringValue(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? duplicateStringValue( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - releaseStringValue( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -ArrayIndex -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -#if defined(JSON_HAS_INT64) -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - -#endif // if defined(JSON_HAS_INT64) - - -Value::Value( Int64 value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt64 value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = 
duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( beginValue, - (unsigned int)(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const 
Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); - return Int(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); - return Int(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); - return UInt(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); - return UInt(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -# if defined(JSON_HAS_INT64) - -Value::Int64 -Value::asInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt64 -Value::asUInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} -# endif // if defined(JSON_HAS_INT64) - - -LargestInt -Value::asLargestInt() const -{ -#if defined(JSON_NO_INT64) - return asInt(); -#else - return asInt64(); -#endif -} - - -LargestUInt -Value::asLargestUInt() const -{ -#if defined(JSON_NO_INT64) - return asUInt(); -#else - return asUInt64(); -#endif -} - - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -float -Value::asFloat() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0f; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return static_cast( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1.0f : 0.0f; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0.0f; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -ArrayIndex -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return ArrayIndex( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( ArrayIndex 
newSize ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ArrayIndex oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( ArrayIndex index = newSize; index < oldSize; ++index ) - { - value_.map_->erase( index ); - } - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( ArrayIndex index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -Value & -Value::operator[]( int index ) -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -const Value & -Value::operator[]( ArrayIndex index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -const Value & -Value::operator[]( int index ) const -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( ArrayIndex index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( ArrayIndex index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( ArrayIndex index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - ArrayIndex index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + ArrayIndex(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
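      // --- Editor's note: illustrative sketch, not part of the original patch. ---
      // The Path class implemented above addresses nested values with a small
      // path syntax: '.' separates object keys, "[n]" indexes arrays, and '%'
      // consumes the next PathArgument. The key names below are hypothetical:
      //
      //   Json::Value root;                                    // some parsed document
      //   Json::Path path( ".settings.ports[0]" );
      //   const Json::Value &port = path.resolve( root );      // reference into root
      //   Json::Value safe = path.resolve( root, Json::Value( 80 ) ); // default on miss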
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 7457ca3..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if 
( isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp deleted file mode 100644 index 8c4c180..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,838 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
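// --- Editor's note: illustrative sketch, not part of the original patch. ---
// This file implements the JSON writers (FastWriter, StyledWriter,
// StyledStreamWriter). A minimal FastWriter use, serializing a Value to a
// compact single-line string (key and value are hypothetical):
//
//   Json::Value root;
//   root["id"] = 42;
//   Json::FastWriter writer;
//   std::string compact = writer.write( root );   // yields "{\"id\":42}\n"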
-#endif - -namespace Json { - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} - - -std::string valueToString( LargestInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( LargestUInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( LargestUInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -#if defined(JSON_HAS_INT64) - -std::string valueToString( Int value ) -{ - return valueToString( LargestInt(value) ); -} - - -std::string valueToString( UInt value ) -{ - return valueToString( LargestUInt(value) ); -} - -#endif // # if defined(JSON_HAS_INT64) - - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asLargestInt() ); - break; - case uintValue: - document_ += valueToString( value.asLargestUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - 
lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
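   // --- Editor's note: illustrative sketch, not part of the original patch. ---
   // Typical use of StyledStreamWriter (file name and key are hypothetical;
   // needs <fstream> in addition to the JsonCpp headers):
   //
   //   Json::Value root;
   //   root["name"] = "example";
   //   std::ofstream out( "config.json" );
   //   Json::StyledStreamWriter writer( "  " );   // two-space indentation
   //   writer.write( out, root );
   //   // equivalently, via the operator<< defined at the end of this file:
   //   // out << root;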
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp deleted file mode 100644 index 02e7b21..0000000 --- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,608 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investigate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases will be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h deleted file mode 100644 index 0d07238..0000000 --- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include <json/config.h> -# include <stdio.h> -# include <deque> -# include <string> - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion fails. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext().
- PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. - TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targeted at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque<Failure> Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs tests as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test <testname> is provided, then run test <testname>.
- int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test cases in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template<typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. -/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case.
-#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp deleted file mode 100644 index de64200..0000000 --- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp +++ /dev/null @@ -1,271 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#include <json/json.h> -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value float_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , float_( 0.00390625f ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default.
- IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, accessArray ) -{ - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; - - const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; -} - - -JSONTEST_FIXTURE( ValueTest, asFloat ) -{ - JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; -} - -void -ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/test/cleantests.py b/tags/jsoncpp/test-0.6.0/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json deleted file mode 100644 index 900fcc2..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[ 1 2 3] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 
-.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 -.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected 
b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected deleted file mode 100644 index 
37c1cb1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected deleted file mode 100644 index bc9520a1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json deleted file mode 100644 index 360d660..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -9223372036854775808 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected deleted file mode 100644 index 39eb798..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=-9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json deleted file mode 100644 index 11d8513..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ --9223372036854775808 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected deleted file mode 100644 index 831f432..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=18446744073709551615 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json deleted file mode 100644 index 6e1fb04..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -18446744073709551615 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected 
b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 -.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 
-.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 -.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 
-.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 -.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 
-.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 -.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 
-.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 -.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 
-.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 -.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 
-.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 -.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 
-.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 -.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 
-.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 -.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 
-.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected deleted file mode 100644 index 6ed627a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json deleted file mode 100644 index f0a220f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json +++ /dev/null @@ -1 +0,0 @@ -"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected 
b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json deleted file mode 100644 index e7e1a9e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/generate_expected.py b/tags/jsoncpp/test-0.6.0/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git 
a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json deleted file mode 100644 index 
8ab0bc4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": 
"abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - "digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' 
or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? - file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/test-0.6.0/test/runjsontests.py b/tags/jsoncpp/test-0.6.0/test/runjsontests.py deleted file mode 100644 index ffe8bd5..0000000 --- a/tags/jsoncpp/test-0.6.0/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - is_json_checker_test = (input_path in test_jsonchecker) or expect_failure - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - 
process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' 
) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/test/rununittests.py b/tags/jsoncpp/test-0.6.0/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/test-0.6.0/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' 
) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/test-0.6.0/version b/tags/jsoncpp/test-0.6.0/version deleted file mode 100644 index 718dcfc..0000000 --- a/tags/jsoncpp/test-0.6.0/version +++ /dev/null @@ -1 +0,0 @@ -test-0.6.0 \ No newline at end of file From 0167f24599751cdedd0153008cd07316dded92bc Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 22:07:51 +0000 Subject: [PATCH 172/268] Release test-0.6.0 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@172 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/test-0.6.0/AUTHORS | 1 + tags/jsoncpp/test-0.6.0/LICENSE | 55 + tags/jsoncpp/test-0.6.0/NEWS.txt | 95 + tags/jsoncpp/test-0.6.0/README.txt | 172 ++ tags/jsoncpp/test-0.6.0/SConstruct | 248 ++ tags/jsoncpp/test-0.6.0/devtools/__init__.py | 1 + tags/jsoncpp/test-0.6.0/devtools/antglob.py | 201 ++ tags/jsoncpp/test-0.6.0/devtools/fixeol.py | 63 + .../test-0.6.0/devtools/licenseupdater.py | 93 + tags/jsoncpp/test-0.6.0/devtools/tarball.py | 53 + tags/jsoncpp/test-0.6.0/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/test-0.6.0/doc/footer.html | 23 + tags/jsoncpp/test-0.6.0/doc/header.html | 24 + tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox | 126 + tags/jsoncpp/test-0.6.0/doc/readme.txt | 1 + tags/jsoncpp/test-0.6.0/doc/roadmap.dox | 35 + tags/jsoncpp/test-0.6.0/doxybuild.py | 169 ++ .../test-0.6.0/include/json/autolink.h | 24 + tags/jsoncpp/test-0.6.0/include/json/config.h | 96 + .../test-0.6.0/include/json/features.h | 49 + .../test-0.6.0/include/json/forwards.h | 44 + tags/jsoncpp/test-0.6.0/include/json/json.h | 15 + tags/jsoncpp/test-0.6.0/include/json/reader.h | 214 ++ tags/jsoncpp/test-0.6.0/include/json/value.h | 1103 +++++++++ tags/jsoncpp/test-0.6.0/include/json/writer.h | 185 ++ .../test-0.6.0/makefiles/vs71/jsoncpp.sln | 46 + .../test-0.6.0/makefiles/vs71/jsontest.vcproj | 119 + .../test-0.6.0/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/test-0.6.0/makerelease.py | 380 +++ .../test-0.6.0/scons-tools/globtool.py | 53 + .../jsoncpp/test-0.6.0/scons-tools/srcdist.py | 179 ++ .../test-0.6.0/scons-tools/substinfile.py | 79 + tags/jsoncpp/test-0.6.0/scons-tools/targz.py | 82 + .../test-0.6.0/src/jsontestrunner/main.cpp | 269 +++ .../test-0.6.0/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../test-0.6.0/src/lib_json/json_reader.cpp | 880 +++++++ .../test-0.6.0/src/lib_json/json_tool.h | 93 + .../test-0.6.0/src/lib_json/json_value.cpp | 1847 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../test-0.6.0/src/lib_json/json_writer.cpp | 838 +++++++ .../test-0.6.0/src/lib_json/sconscript | 8 + .../test-0.6.0/src/test_lib_json/jsontest.cpp | 608 +++++ .../test-0.6.0/src/test_lib_json/jsontest.h | 259 ++ .../test-0.6.0/src/test_lib_json/main.cpp | 271 +++ .../test-0.6.0/src/test_lib_json/sconscript | 10 + tags/jsoncpp/test-0.6.0/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../test-0.6.0/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../test-0.6.0/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../test-0.6.0/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + 
.../test-0.6.0/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../test-0.6.0/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../test-0.6.0/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../test-0.6.0/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../test-0.6.0/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../test-0.6.0/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../test-0.6.0/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../test-0.6.0/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../test-0.6.0/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../test-0.6.0/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../test-0.6.0/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../test-0.6.0/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../test-0.6.0/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../test-0.6.0/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../test-0.6.0/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../test-0.6.0/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../test-0.6.0/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../test-0.6.0/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../test-0.6.0/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../test-0.6.0/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../test-0.6.0/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../test-0.6.0/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../test-0.6.0/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../test-0.6.0/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../test/data/test_real_01.expected | 2 + .../test-0.6.0/test/data/test_real_01.json | 3 + .../test/data/test_real_02.expected | 2 + .../test-0.6.0/test/data/test_real_02.json | 3 + .../test/data/test_real_03.expected | 2 + .../test-0.6.0/test/data/test_real_03.json | 3 + .../test/data/test_real_04.expected | 2 + .../test-0.6.0/test/data/test_real_04.json | 3 + .../test/data/test_real_05.expected | 3 + .../test-0.6.0/test/data/test_real_05.json | 3 + .../test/data/test_real_06.expected | 3 + .../test-0.6.0/test/data/test_real_06.json | 3 + .../test/data/test_real_07.expected | 3 + .../test-0.6.0/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../test-0.6.0/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../test-0.6.0/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 + 
.../test-0.6.0/test/data/test_string_03.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../test-0.6.0/test/generate_expected.py | 11 + .../test-0.6.0/test/jsonchecker/fail1.json | 1 + .../test-0.6.0/test/jsonchecker/fail10.json | 1 + .../test-0.6.0/test/jsonchecker/fail11.json | 1 + .../test-0.6.0/test/jsonchecker/fail12.json | 1 + .../test-0.6.0/test/jsonchecker/fail13.json | 1 + .../test-0.6.0/test/jsonchecker/fail14.json | 1 + .../test-0.6.0/test/jsonchecker/fail15.json | 1 + .../test-0.6.0/test/jsonchecker/fail16.json | 1 + .../test-0.6.0/test/jsonchecker/fail17.json | 1 + .../test-0.6.0/test/jsonchecker/fail18.json | 1 + .../test-0.6.0/test/jsonchecker/fail19.json | 1 + .../test-0.6.0/test/jsonchecker/fail2.json | 1 + .../test-0.6.0/test/jsonchecker/fail20.json | 1 + .../test-0.6.0/test/jsonchecker/fail21.json | 1 + .../test-0.6.0/test/jsonchecker/fail22.json | 1 + .../test-0.6.0/test/jsonchecker/fail23.json | 1 + .../test-0.6.0/test/jsonchecker/fail24.json | 1 + .../test-0.6.0/test/jsonchecker/fail25.json | 1 + .../test-0.6.0/test/jsonchecker/fail26.json | 1 + .../test-0.6.0/test/jsonchecker/fail27.json | 2 + .../test-0.6.0/test/jsonchecker/fail28.json | 2 + .../test-0.6.0/test/jsonchecker/fail29.json | 1 + .../test-0.6.0/test/jsonchecker/fail3.json | 1 + .../test-0.6.0/test/jsonchecker/fail30.json | 1 + .../test-0.6.0/test/jsonchecker/fail31.json | 1 + .../test-0.6.0/test/jsonchecker/fail32.json | 1 + .../test-0.6.0/test/jsonchecker/fail33.json | 1 + .../test-0.6.0/test/jsonchecker/fail4.json | 1 + .../test-0.6.0/test/jsonchecker/fail5.json | 1 + .../test-0.6.0/test/jsonchecker/fail6.json | 1 + .../test-0.6.0/test/jsonchecker/fail7.json | 1 + .../test-0.6.0/test/jsonchecker/fail8.json | 1 + .../test-0.6.0/test/jsonchecker/fail9.json | 1 + .../test-0.6.0/test/jsonchecker/pass1.json | 58 + .../test-0.6.0/test/jsonchecker/pass2.json | 1 + .../test-0.6.0/test/jsonchecker/pass3.json | 6 + .../test-0.6.0/test/jsonchecker/readme.txt | 3 + .../test-0.6.0/test/pyjsontestrunner.py | 64 + tags/jsoncpp/test-0.6.0/test/runjsontests.py | 134 ++ tags/jsoncpp/test-0.6.0/test/rununittests.py | 73 + tags/jsoncpp/test-0.6.0/version | 1 + 185 files changed, 15355 insertions(+) create mode 100644 tags/jsoncpp/test-0.6.0/AUTHORS create mode 100644 tags/jsoncpp/test-0.6.0/LICENSE create mode 100644 tags/jsoncpp/test-0.6.0/NEWS.txt create mode 100644 tags/jsoncpp/test-0.6.0/README.txt create mode 100644 tags/jsoncpp/test-0.6.0/SConstruct create mode 100644 tags/jsoncpp/test-0.6.0/devtools/__init__.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/antglob.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/fixeol.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/test-0.6.0/devtools/tarball.py create mode 100644 tags/jsoncpp/test-0.6.0/doc/doxyfile.in create mode 100644 tags/jsoncpp/test-0.6.0/doc/footer.html create mode 100644 tags/jsoncpp/test-0.6.0/doc/header.html create mode 100644 tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/test-0.6.0/doc/readme.txt create 
mode 100644 tags/jsoncpp/test-0.6.0/doc/roadmap.dox create mode 100644 tags/jsoncpp/test-0.6.0/doxybuild.py create mode 100644 tags/jsoncpp/test-0.6.0/include/json/autolink.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/config.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/features.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/forwards.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/json.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/reader.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/value.h create mode 100644 tags/jsoncpp/test-0.6.0/include/json/writer.h create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/test-0.6.0/makerelease.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/globtool.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/test-0.6.0/scons-tools/targz.py create mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/lib_json/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/test-0.6.0/test/cleantests.py create mode 100644 tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_array_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json create mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_large_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_object_04.json create mode 100644 
tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_06.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_real_07.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/test-0.6.0/test/generate_expected.py create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json create mode 100644 
tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/test-0.6.0/test/runjsontests.py create mode 100644 tags/jsoncpp/test-0.6.0/test/rununittests.py create mode 100644 tags/jsoncpp/test-0.6.0/version diff --git a/tags/jsoncpp/test-0.6.0/AUTHORS b/tags/jsoncpp/test-0.6.0/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/test-0.6.0/LICENSE b/tags/jsoncpp/test-0.6.0/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. 
+ +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/test-0.6.0/NEWS.txt b/tags/jsoncpp/test-0.6.0/NEWS.txt new file mode 100644 index 0000000..7978c0a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/NEWS.txt @@ -0,0 +1,95 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installation. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to setup the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + - Added support for amalgated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgated source and header" + for detail. + +* Value + + - Removed experimental ValueAllocator, it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + + - Added support for 64 bits integer: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64 bits integers on system that support them (based on __int64 on + Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still returns plain + "int" based types, but asserts if an attempt is made to retrieve + a 64 bits value that can not represented as the return type. 
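As a rough illustration of the assertion behaviour described just above, here is a small hypothetical C++ sketch; the Json::Int64 constructor and the range check performed by isInt() are assumptions based on the notes, not something the notes spell out:

    #include <json/json.h>
    #include <iostream>

    int main() {
        // A value larger than a 32 bits int; calling asInt() on it would
        // trigger the assertion described above.
        Json::Value v( Json::Int64( 5000000000LL ) );
        if ( v.isInt() )
            std::cout << v.asInt() << std::endl;
        else
            std::cout << "value needs the 64 bits accessors described below" << std::endl;
        return 0;
    }
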
+ + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64 bits integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns + the integer as a LargestInt/LargestUInt respectively. Those functions + functions are typically used when implementing writer. + + The reader attempts to read number as 64 bits integer, and fall back + to reading a double if the number is not in the range of 64 bits + integer. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now returns + long long. This changes break code that was passing the return value + to *printf() function. + + Support for 64 bits integer can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). + + - Array index can be passed as int to operator[], allowing use of literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoid lost of precision warning caused by used of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added test to ensure that the escape sequence "\/" is corrected handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who + helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/test-0.6.0/README.txt b/tags/jsoncpp/test-0.6.0/README.txt new file mode 100644 index 0000000..ba70329 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/README.txt @@ -0,0 +1,172 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON value, handle serialization and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. 
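To make the Value and Reader changes listed in NEWS.txt above more concrete, here is a small hypothetical usage sketch. It only uses members named in the notes (asInt(), asInt64(), asFloat(), getFormattedErrorMessages(), int literals as array indexes) and is an illustration, not code taken from the release:

    #include <json/json.h>
    #include <iostream>

    int main() {
        Json::Value root;
        Json::Reader reader;
        // The reader first tries to read numbers as 64 bits integers (see notes above).
        if ( !reader.parse( "[1234, 5000000000, 0.25]", root ) ) {
            std::cerr << reader.getFormattedErrorMessages();  // renamed accessor
            return 1;
        }
        int small = root[0].asInt();            // int literal used as array index
        Json::Int64 big = root[1].asInt64();    // value outside the 32 bits range
        float ratio = root[2].asFloat();        // avoids the double to float warning
        std::cout << small << " " << big << " " << ratio << std::endl;
        return 0;
    }
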
+ +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +Notes that test can be run by scons using the 'check' target (see above). + +You need to run test manually only if you are troubleshooting an issue. + +In the instruction below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + +Notes that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + +* Generating amalgated source and header + ====================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgated source can be generated at any time by running the following +command from the top-directory (requires python 2.6): + +python amalgate.py + +It is possible to specify header name. See -h options for detail. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that need to be added to your project +- dist/json/json.h: header file corresponding to use in your project. It is +equivalent to including json/json.h in non-amalgated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header the provides forward declaration +of all JsonCpp types. This typically what should be included in headers to +speed-up compilation. + +The amalgated sources are generated by concatenating JsonCpp source in the +correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of +other headers. + +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path. 
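For completeness, a minimal translation unit matching the layout described above (include/ on the compiler include path, the json/json.h header named in the amalgation notes); the StyledWriter usage is an illustrative assumption rather than something this README prescribes:

    #include <json/json.h>
    #include <iostream>

    int main() {
        Json::Value root;
        root["encoding"] = "UTF-8";
        root["indent"]["length"] = 3;
        Json::StyledWriter writer;   // indented, human readable output
        std::cout << writer.write( root );
        return 0;
    }

With the amalgated distribution, dist/jsoncpp.cpp is simply compiled and linked together with such a file; with the regular source tree, the lib_json library is linked instead.
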
jsoncpp headers +should be included as follow: + +#include + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/test-0.6.0/SConstruct b/tags/jsoncpp/test-0.6.0/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. 
+ import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. 
+ if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + 
target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/test-0.6.0/devtools/__init__.py b/tags/jsoncpp/test-0.6.0/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/devtools/antglob.py b/tags/jsoncpp/test-0.6.0/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. 
+ Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
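+ # rejected by the include/exclude filters: skip this entry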
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/test-0.6.0/devtools/fixeol.py b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript 
**/wscript_build', +## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/test-0.6.0/devtools/tarball.py b/tags/jsoncpp/test-0.6.0/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
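+ # gzopen() handles the gzip layer transparently; extractall() restores the
+ # archived paths under base_dir.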
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/test-0.6.0/doc/doxyfile.in b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/test-0.6.0/doc/footer.html b/tags/jsoncpp/test-0.6.0/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/test-0.6.0/doc/header.html b/tags/jsoncpp/test-0.6.0/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox b/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox new file mode 100644 index 0000000..97cc108 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/doc/jsoncpp.dox @@ -0,0 +1,126 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space": true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- attach C and C++ style comments to element during parsing +- rewrite JSON document preserving original comments + +Notes: Comments used to be supported in JSON but where removed for +portability (C like comments are not supported in Python). Since +comments are useful in configuration/input file, this feature was +preserved. + +\section _example Code example + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormattedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To checkout the source, see the following +instructions. + +\section _news What's New? 
+The description of latest changes can be found in
+NEWS.txt in the top-directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest NEWS.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+See file LICENSE in the top-directory of the project.
+
+Basically JsonCpp is licensed under the MIT license, or public domain if desired
+and recognized in your jurisdiction.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/test-0.6.0/doc/readme.txt b/tags/jsoncpp/test-0.6.0/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/test-0.6.0/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
diff --git a/tags/jsoncpp/test-0.6.0/doc/roadmap.dox b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox
new file mode 100644
index 0000000..c7f14d5
--- /dev/null
+++ b/tags/jsoncpp/test-0.6.0/doc/roadmap.dox
@@ -0,0 +1,35 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library build
+    - Enhance help
+  - Platform portability check: (Note: was OK on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as numeric for use in preprocessor tests
+  - Remove buggy experimental hash stuff
+  \section ms_strict Add a strict mode to reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get jsonchecker failing tests to pass in strict mode
+  \section ms_writer Writer control
+  Provides more control over how specific items are serialized when JSON allows a choice:
+  - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u".
+  - Optionally allow escaping of "/" using "\/".
+  \section ms_separation Expose JSON reader/writer APIs that do not impose using Json::Value.
+  Some typical use cases involve converting an application-specific structure to/from a JSON document.
+  - Event-based parser to allow unserializing a JSON document directly into a data structure instead of
+    using the intermediate Json::Value.
+  - Stream-based parser to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provides an event-based parser. Should allow pulling & skipping events for ease of use.
+    - Provides a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property name definition, avoiding allocation
+  - Static property dictionary can be provided to the JSON reader
+  - Performance scenario & benchmarking
+*/
diff --git a/tags/jsoncpp/test-0.6.0/doxybuild.py b/tags/jsoncpp/test-0.6.0/doxybuild.py
new file mode 100644
index 0000000..03ad68d
--- /dev/null
+++ b/tags/jsoncpp/test-0.6.0/doxybuild.py
@@ -0,0 +1,169 @@
+"""Script to generate doxygen documentation.
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/include/json/autolink.h b/tags/jsoncpp/test-0.6.0/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/config.h b/tags/jsoncpp/test-0.6.0/include/json/config.h new file mode 100644 index 0000000..24991d5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
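+/// (Editor's note, an assumption not stated in this header: when JSON_USE_EXCEPTION is
+/// defined, an invalid type manipulation, for example calling asInt() on a string Value,
+/// is reported by throwing std::runtime_error rather than by triggering the C assert macro.)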
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGATED + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/features.h b/tags/jsoncpp/test-0.6.0/include/json/features.h new file mode 100644 index 0000000..0b53db1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/forwards.h b/tags/jsoncpp/test-0.6.0/include/json/forwards.h new file mode 100644 index 0000000..083d44f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/json.h b/tags/jsoncpp/test-0.6.0/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/reader.h b/tags/jsoncpp/test-0.6.0/include/json/reader.h new file mode 100644 index 0000000..5e4c32a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/reader.h @@ -0,0 +1,214 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
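+ *
+ * (Editor's note: the sketch below is an illustration assembled only from the
+ * declarations in this header, namely Reader( const Features & ),
+ * Features::strictMode(), parse() and this method; it is not part of the
+ * original commit.)
+ * \code
+ * std::string document = "{ \"answer\" : 42 }";
+ * Json::Reader reader( Json::Features::strictMode() );
+ * Json::Value root;
+ * if ( !reader.parse( document, root ) )
+ *    std::cerr << reader.getFormattedErrorMessages();
+ * \endcode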
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/value.h b/tags/jsoncpp/test-0.6.0/include/json/value.h new file mode 100644 index 0000000..66821ab --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
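+ *
+ * (Editor's note: a minimal member-iteration sketch based on the members
+ * declared below, getMemberNames() and operator[]; it is not part of the
+ * original commit.)
+ * \code
+ * Json::Value obj( Json::objectValue );
+ * obj["name"] = "jsoncpp";
+ * obj["version"] = 1;
+ * Json::Value::Members names = obj.getMemberNames();
+ * for ( Json::Value::Members::size_type i = 0; i < names.size(); ++i )
+ *    std::cout << names[i] << " = " << obj[ names[i] ] << "\n";
+ * \endcode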
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
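+ /// (Editor's note: hypothetical usage sketch for the array accessors above and
+ /// append() declared below; it is not part of the original commit.)
+ /// \code
+ /// Json::Value arr( Json::arrayValue );
+ /// arr.append( "first" );              // arr[0u] == "first"
+ /// arr[2] = 3;                         // arr[1] is filled with null, arr[2] becomes 3
+ /// const Json::Value &front = arr[0u]; // 0u disambiguates from the const char* overload
+ /// \endcode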
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
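+ /// Illustrative sketch only (Path is marked experimental above):
+ /// \code
+ /// Json::Path path( ".settings.size" );
+ /// path.make( root ) = 640;   // creates root["settings"]["size"] if needed
+ /// \endcode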
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/include/json/writer.h b/tags/jsoncpp/test-0.6.0/include/json/writer.h new file mode 100644 index 0000000..cb0bd9b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
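+ * A minimal usage sketch (illustrative only, not from the original header):
+ * \code
+ * Json::StyledWriter writer;
+ * std::string document = writer.write( root );   // indented, human readable JSON
+ * \endcode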
+ * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
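+ * A minimal usage sketch (illustrative only):
+ * \code
+ * Json::Value root;
+ * root["encoding"] = "UTF-8";
+ * Json::StyledStreamWriter writer( "  " );   // two-space indentation
+ * writer.write( std::cout, root );
+ * \endcode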
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/test-0.6.0/makerelease.py b/tags/jsoncpp/test-0.6.0/makerelease.py new file mode 100644 index 0000000..a6e330e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/makerelease.py @@ -0,0 +1,380 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
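+    # Visual Studio files (*.sln, *.vcproj) are normalized to CRLF and all other
+    # text sources to LF, so each tool chain sees its native line endings.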
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir + print 'Generating amalgated source tarball to', amalgated_tarball_path + amalgated_dir = 'dist/amalgated' + amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) + amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version + tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], + amalgated_dir, prefix_dir=amalgated_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + 
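+    # Build-check each platform listed in --platforms against the freshly
+    # unpacked tarball; a single failure removes the release tag and aborts.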
all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
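+        For illustration (assuming the tool has been loaded into the environment),
+        a typical builder invocation is:
+            env.SubstInFile( 'version.h', 'version.h.in',
+                             SUBST_DICT = {'%VERSION%': '1.2.3'} )
+        Note that SUBST_DICT keys are applied as regular expressions by re.sub().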
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/test-0.6.0/scons-tools/targz.py b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
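+        # Typical SConscript usage once the tool is loaded (illustrative only):
+        #   env.TarGz( 'dist/source.tar.gz', [ env.Dir('dist/export') ],
+        #              TARGZ_BASEDIR = env.Dir('dist') )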
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
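+   // The page-index growth above follows newIndexCount = indexCount*3/2 + 1,
+   // i.e. 0 -> 1 -> 2 -> 4 -> 7 -> 11 -> 17 -> ..., so growing an array to N
+   // elements costs only O(log N) reallocations of the index.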
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
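+         // Slots in a link are filled front to back, so the first slot flagged
+         // "available" means the key is absent from this bucket and that slot
+         // can be claimed by setNewItem(); otherwise keys are compared slot by slot.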
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..7c594e2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_reader.cpp @@ -0,0 +1,880 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
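+  // This overload backs the operator>>( std::istream&, Value& ) defined at the
+  // end of this file. A hypothetical caller (the file name is illustrative only):
+  //   std::ifstream in( "input.json" );
+  //   Json::Value root;
+  //   Json::Reader reader;
+  //   bool ok = reader.parse( in, root, true );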
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); 
+ } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
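+         // Worked example (assuming a 64-bit LargestUInt and a non-negative
+         // literal): maxLargestUInt = 18446744073709551615, so threshold is
+         // 1844674407370955161 and lastDigitThreshold is 5; once value reaches
+         // the threshold, anything but a final digit in 0-5 falls back to
+         // decodeDouble().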
+ if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; + } + if ( isNegative ) + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize+1]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && 
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... + * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast<char>(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast<char>(0x80 | (0x3f & cp)); + result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast<char>(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast<char>(0x80 | (0x3f & cp)); + result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specifies the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned integer to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( LargestUInt value, + char *&current ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp new file mode 100644 index 0000000..c810417 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_value.cpp @@ -0,0 +1,1847 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction.
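// Usage sketch for json_tool.h's uintToString() above (illustrative only): the
// helper writes digits backwards from the end of the caller-supplied buffer,
// starting with the terminating NUL, and leaves `current` pointing at the first
// digit of the finished string:
//
//   UIntToStringBuffer buffer;
//   char *current = buffer + sizeof(buffer);   // one past the end
//   uintToString( 12345, current );
//   // current now points at the NUL-terminated text "12345"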
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include <json/value.h> +# include <json/writer.h> +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGATED) +#include <utility> +#include <stdexcept> +#include <cstring> +#include <cassert> +#include <cstdlib> +#ifdef JSON_USE_CPPTL +# include <cpptl/conststring.h> +#endif +#include <cstddef> // size_t + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + + +/// Unknown size marker +enum { unknown = (unsigned)-1 }; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. If equal to unknown, it will be + * computed using strlen(value). + * @return Pointer to the duplicated string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast<char *>( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + +} // namespace Json + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals...
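// Worked example for the limit constants above (illustrative): with a 32-bit
// int/UInt, UInt(-1) == 0xFFFFFFFF, so maxInt == UInt(-1)/2 == 0x7FFFFFFF
// (2147483647) and minInt == ~maxInt == -2147483648; the 64-bit and "Largest"
// constants follow the same pattern. Strings obtained from
// duplicateStringValue() must be paired with releaseStringValue():
//
//   char *copy = duplicateStringValue( "example" );
//   // ... use copy ...
//   releaseStringValue( copy );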
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGATED) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex 
newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
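// Usage sketch for the Path syntax handled by makePath() above (illustrative;
// the document layout is hypothetical): keys are separated by '.', array
// indices use '[n]', and '%' / '[%]' take a key or index from the extra
// PathArgument parameters.
//
//   Json::Path path( ".window.panes[0].width" );
//   Json::Value width = path.resolve( root, Json::Value( 640 ) );  // default when missing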
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( 
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..8c4c180 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
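// Usage sketch for the iterators defined in json_valueiterator.inl above
// (illustrative; `root` is a hypothetical object value):
//
//   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
//   {
//      std::cout << it.memberName() << " -> " << (*it).toStyledString();
//   }
//
// For array values, it.index() returns the element index and it.key() yields
// an integral Value instead of a member name.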
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid </ + // sequence. + // Should add a flag to allow this compatibility mode and prevent this + // sequence from occurring. + default: + if ( isControlCharacter( *c ) ) + { + std::ostringstream oss; + oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asLargestInt() ); + break; + case uintValue: + document_ += valueToString( value.asLargestUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ?
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + 
lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include <stdio.h> +#include <string> + +#if defined(_MSC_VER) +// Used to install a report hook that prevents dialogs on assertion and error. +# include <crtdbg.h> +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault.
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include <windows.h> +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext.
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investigate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases are run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..0d07238 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/jsontest.h @@ -0,0 +1,259 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include <json/config.h> +# include <stdio.h> +# include <deque> +# include <string> + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion fails. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext().
+ PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. + TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targeted at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque<Failure> Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname.
+ int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test cases in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque<TestCaseFactory> Factories; + Factories tests_; + }; + + template <typename T> + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equal. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equal. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case.
+#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overridden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp new file mode 100644 index 0000000..de64200 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/main.cpp @@ -0,0 +1,271 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include <json/json.h> +#include "jsontest.h" + + +// TODO: +// - boolean values return that they are integral. Should not be. +// - unsigned integers in the integer range are not considered to be valid integers. Should check range. + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , float_( 0.00390625f ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default.
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/test-0.6.0/test/cleantests.py b/tags/jsoncpp/test-0.6.0/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 
+.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected new file mode 100644 index 
0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json new file mode 100644 index 
0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json 
b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 
+.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 
+.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 
+.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 
+.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 
+.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 
+.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 
+.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 
+.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 
+.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 
+.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git 
a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json 
b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json new file mode 100644 index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- 
/dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ +.="¢" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/generate_expected.py b/tags/jsoncpp/test-0.6.0/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json 
b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ 
No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": 
"\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/test-0.6.0/test/runjsontests.py b/tags/jsoncpp/test-0.6.0/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + 
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/test/rununittests.py b/tags/jsoncpp/test-0.6.0/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' 
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/test-0.6.0/version b/tags/jsoncpp/test-0.6.0/version new file mode 100644 index 0000000..718dcfc --- /dev/null +++ b/tags/jsoncpp/test-0.6.0/version @@ -0,0 +1 @@ +test-0.6.0 \ No newline at end of file From 0abef94c9573717b4cbfb72d8635c3f58cb13a40 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 22:11:05 +0000 Subject: [PATCH 173/268] Release 0.6.0-rc1 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@173 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version index 718dcfc..8d1042e 100644 --- a/trunk/jsoncpp/version +++ b/trunk/jsoncpp/version @@ -1 +1 @@ -test-0.6.0 \ No newline at end of file +0.6.0-rc1 \ No newline at end of file From aed037fe6e6ee054326147c80b2ca9efe1e70bd8 Mon Sep 17 00:00:00 2001 From: blep Date: Sun, 1 May 2011 22:12:16 +0000 Subject: [PATCH 174/268] Release 0.6.0-rc1 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@174 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc1/AUTHORS | 1 + tags/jsoncpp/0.6.0-rc1/LICENSE | 55 + tags/jsoncpp/0.6.0-rc1/NEWS.txt | 95 + tags/jsoncpp/0.6.0-rc1/README.txt | 172 ++ tags/jsoncpp/0.6.0-rc1/SConstruct | 248 ++ tags/jsoncpp/0.6.0-rc1/devtools/__init__.py | 1 + tags/jsoncpp/0.6.0-rc1/devtools/antglob.py | 201 ++ tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py | 63 + .../0.6.0-rc1/devtools/licenseupdater.py | 93 + tags/jsoncpp/0.6.0-rc1/devtools/tarball.py | 53 + tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.6.0-rc1/doc/footer.html | 23 + tags/jsoncpp/0.6.0-rc1/doc/header.html | 24 + tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox | 126 + tags/jsoncpp/0.6.0-rc1/doc/readme.txt | 1 + tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox | 35 + tags/jsoncpp/0.6.0-rc1/doxybuild.py | 169 ++ .../jsoncpp/0.6.0-rc1/include/json/autolink.h | 24 + tags/jsoncpp/0.6.0-rc1/include/json/config.h | 96 + .../jsoncpp/0.6.0-rc1/include/json/features.h | 49 + .../jsoncpp/0.6.0-rc1/include/json/forwards.h | 44 + tags/jsoncpp/0.6.0-rc1/include/json/json.h | 15 + tags/jsoncpp/0.6.0-rc1/include/json/reader.h | 214 ++ tags/jsoncpp/0.6.0-rc1/include/json/value.h | 1103 +++++++++ tags/jsoncpp/0.6.0-rc1/include/json/writer.h | 185 ++ .../0.6.0-rc1/makefiles/vs71/jsoncpp.sln | 46 + .../0.6.0-rc1/makefiles/vs71/jsontest.vcproj | 119 + .../0.6.0-rc1/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.6.0-rc1/makerelease.py | 380 +++ .../jsoncpp/0.6.0-rc1/scons-tools/globtool.py | 53 + tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py | 179 ++ .../0.6.0-rc1/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py | 82 + .../0.6.0-rc1/src/jsontestrunner/main.cpp | 269 +++ .../0.6.0-rc1/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../0.6.0-rc1/src/lib_json/json_reader.cpp | 880 +++++++ .../0.6.0-rc1/src/lib_json/json_tool.h | 93 + .../0.6.0-rc1/src/lib_json/json_value.cpp | 1847 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../0.6.0-rc1/src/lib_json/json_writer.cpp | 838 +++++++ .../jsoncpp/0.6.0-rc1/src/lib_json/sconscript | 8 + .../0.6.0-rc1/src/test_lib_json/jsontest.cpp | 608 +++++ .../0.6.0-rc1/src/test_lib_json/jsontest.h | 259 
++ .../0.6.0-rc1/src/test_lib_json/main.cpp | 271 +++ .../0.6.0-rc1/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.6.0-rc1/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../0.6.0-rc1/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../0.6.0-rc1/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../0.6.0-rc1/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + .../0.6.0-rc1/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../0.6.0-rc1/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../0.6.0-rc1/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../0.6.0-rc1/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../0.6.0-rc1/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../0.6.0-rc1/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../0.6.0-rc1/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../0.6.0-rc1/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../0.6.0-rc1/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../0.6.0-rc1/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../0.6.0-rc1/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../0.6.0-rc1/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../0.6.0-rc1/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../0.6.0-rc1/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../0.6.0-rc1/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../0.6.0-rc1/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../0.6.0-rc1/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../0.6.0-rc1/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../0.6.0-rc1/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.6.0-rc1/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../0.6.0-rc1/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../0.6.0-rc1/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../0.6.0-rc1/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../0.6.0-rc1/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.6.0-rc1/test/data/test_real_01.expected | 2 + .../0.6.0-rc1/test/data/test_real_01.json | 3 + .../0.6.0-rc1/test/data/test_real_02.expected | 2 + .../0.6.0-rc1/test/data/test_real_02.json | 3 + .../0.6.0-rc1/test/data/test_real_03.expected | 2 + .../0.6.0-rc1/test/data/test_real_03.json | 3 + .../0.6.0-rc1/test/data/test_real_04.expected | 2 + .../0.6.0-rc1/test/data/test_real_04.json | 3 + 
.../0.6.0-rc1/test/data/test_real_05.expected | 3 + .../0.6.0-rc1/test/data/test_real_05.json | 3 + .../0.6.0-rc1/test/data/test_real_06.expected | 3 + .../0.6.0-rc1/test/data/test_real_06.json | 3 + .../0.6.0-rc1/test/data/test_real_07.expected | 3 + .../0.6.0-rc1/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../0.6.0-rc1/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../0.6.0-rc1/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 + .../0.6.0-rc1/test/data/test_string_03.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../0.6.0-rc1/test/generate_expected.py | 11 + .../0.6.0-rc1/test/jsonchecker/fail1.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail10.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail11.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail12.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail13.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail14.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail15.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail16.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail17.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail18.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail19.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail2.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail20.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail21.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail22.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail23.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail24.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail25.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail26.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail27.json | 2 + .../0.6.0-rc1/test/jsonchecker/fail28.json | 2 + .../0.6.0-rc1/test/jsonchecker/fail29.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail3.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail30.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail31.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail32.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail33.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail4.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail5.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail6.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail7.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail8.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail9.json | 1 + .../0.6.0-rc1/test/jsonchecker/pass1.json | 58 + .../0.6.0-rc1/test/jsonchecker/pass2.json | 1 + .../0.6.0-rc1/test/jsonchecker/pass3.json | 6 + .../0.6.0-rc1/test/jsonchecker/readme.txt | 3 + .../0.6.0-rc1/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.6.0-rc1/test/runjsontests.py | 134 ++ tags/jsoncpp/0.6.0-rc1/test/rununittests.py | 73 + tags/jsoncpp/0.6.0-rc1/version | 1 + 185 files changed, 15355 insertions(+) create mode 100644 tags/jsoncpp/0.6.0-rc1/AUTHORS create mode 100644 tags/jsoncpp/0.6.0-rc1/LICENSE create mode 100644 tags/jsoncpp/0.6.0-rc1/NEWS.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/README.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/SConstruct create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/__init__.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/antglob.py create mode 
100644 tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/tarball.py create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/footer.html create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/header.html create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.6.0-rc1/doxybuild.py create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/autolink.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/config.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/features.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/forwards.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/json.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/reader.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/value.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/writer.h create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makerelease.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/test/cleantests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/generate_expected.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/runjsontests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/rununittests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/version diff --git a/tags/jsoncpp/0.6.0-rc1/AUTHORS b/tags/jsoncpp/0.6.0-rc1/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc1/LICENSE b/tags/jsoncpp/0.6.0-rc1/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. 
+ +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/0.6.0-rc1/NEWS.txt b/tags/jsoncpp/0.6.0-rc1/NEWS.txt new file mode 100644 index 0000000..7978c0a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/NEWS.txt @@ -0,0 +1,95 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installation. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to setup the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + - Added support for amalgated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgated source and header" + for detail. + +* Value + + - Removed experimental ValueAllocator, it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + + - Added support for 64 bits integer: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64 bits integers on system that support them (based on __int64 on + Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still returns plain + "int" based types, but asserts if an attempt is made to retrieve + a 64 bits value that can not represented as the return type. 
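      A minimal usage sketch, assuming the 64-bit types above together with
      the asInt64()/asLargestInt() accessors that the next paragraphs
      describe:

        Json::Value v;
        v["big"] = Json::Int64(1) << 40;           // too large for a plain int
        // v["big"].asInt() would assert here, as explained above.
        Json::Int64 big = v["big"].asInt64();      // 64-bit accessor (see below)
        Json::LargestInt widest = v["big"].asLargestInt();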
+ + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64 bits integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns + the integer as a LargestInt/LargestUInt respectively. Those functions + functions are typically used when implementing writer. + + The reader attempts to read number as 64 bits integer, and fall back + to reading a double if the number is not in the range of 64 bits + integer. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now returns + long long. This changes break code that was passing the return value + to *printf() function. + + Support for 64 bits integer can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). + + - Array index can be passed as int to operator[], allowing use of literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoid lost of precision warning caused by used of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added test to ensure that the escape sequence "\/" is corrected handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who + helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/0.6.0-rc1/README.txt b/tags/jsoncpp/0.6.0-rc1/README.txt new file mode 100644 index 0000000..ba70329 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/README.txt @@ -0,0 +1,172 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of +value, and a collection of name/value pairs. + +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON value, handle serialization and unserialization to string. + +It can also preserve existing comment in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You download scons-local distribution from the following url: +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ + +Unzip it in the directory where you found this README file. scons.py Should be +at the same level as README. 
+ +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding platform is fairly simple. You need to change the Sconstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the test manually: + ========================== + +Notes that test can be run by scons using the 'check' target (see above). + +You need to run test manually only if you are troubleshooting an issue. + +In the instruction below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + +Notes that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + +* Generating amalgated source and header + ====================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgated source can be generated at any time by running the following +command from the top-directory (requires python 2.6): + +python amalgate.py + +It is possible to specify header name. See -h options for detail. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that need to be added to your project +- dist/json/json.h: header file corresponding to use in your project. It is +equivalent to including json/json.h in non-amalgated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header the provides forward declaration +of all JsonCpp types. This typically what should be included in headers to +speed-up compilation. + +The amalgated sources are generated by concatenating JsonCpp source in the +correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of +other headers. + +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path. 
jsoncpp headers +should be included as follow: + +#include + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/0.6.0-rc1/SConstruct b/tags/jsoncpp/0.6.0-rc1/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. 
+ import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. 
+ if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + 
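+    # Runs the sconscript found in target_directory inside the per-platform
+    # variant directory (buildscons/<platform>/...), and registers that
+    # sconscript with the source distribution.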
target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. 
+ Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
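+## e.g. an editor backup such as 'src/main.cpp~' matches the '**/*~' pattern
+## from default_excludes and is dropped here, even though the default
+## '**/*' include accepted it.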
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', 
+## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
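+    # The documented equivalent is tarfile.open(tarball_path, 'r:gz'), which
+    # returns the same kind of TarFile object for a gzip-compressed archive.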
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc1/doc/footer.html b/tags/jsoncpp/0.6.0-rc1/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.6.0-rc1/doc/header.html b/tags/jsoncpp/0.6.0-rc1/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox
new file mode 100644
index 0000000..97cc108
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox
@@ -0,0 +1,126 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+   ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space": true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- attach C and C++ style comments to elements during parsing
+- rewrite JSON documents, preserving the original comments
+
+Note: comments used to be supported in JSON but were removed for
+portability (C-like comments are not supported in Python). Since
+comments are useful in configuration/input files, this feature was
+preserved.
+
+\section _example Code example
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+   // report to the user the failure and its location in the document.
+   std::cout << "Failed to parse configuration\n"
+             << reader.getFormattedErrorMessages();
+   return;
+}
+
+// Get the value of the member of root named 'encoding'; return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins'; return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, make the new configuration document:
+// Since Json::Value has implicit constructors for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _pdownload Download
+The sources can be downloaded from the
+SourceForge download page.
+
+The latest version of the source is available in the project's subversion repository:
+
+http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/
+
+To check out the source, see the following
+instructions.
+
+\section _news What's New?
+The description of the latest changes can be found in
+NEWS.txt in the top directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest NEWS.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+See file LICENSE in the top directory of the project.
+
+Basically, JsonCpp is licensed under the MIT license, or public domain if desired
+and recognized in your jurisdiction.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/readme.txt b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox
new file mode 100644
index 0000000..c7f14d5
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox
@@ -0,0 +1,35 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library build
+    - Enhance help
+  - Platform portability check: (Note: was OK on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as numeric for use in preprocessor test
+  - Remove buggy experimental hash stuff
+  \section ms_strict Add a strict mode to the reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get jsonchecker failing tests to pass in strict mode
+  \section ms_writer Writer control
+  Provide more control over how specific items are serialized where JSON allows a choice:
+  - Optionally allow escaping of non-ASCII characters using the unicode escape sequence "\\u".
+  - Optionally allow escaping of "/" using "\/".
+  \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value.
+  Some typical use cases involve converting an application-specific structure to/from a JSON document.
+  - Event-based parser to allow deserializing a JSON document directly into a data structure instead of
+    using the intermediate Json::Value.
+  - Stream-based writer to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provides an event-based parser. Should allow pulling & skipping events for ease of use.
+    - Provides a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property name definitions, avoiding allocation
+  - A static property dictionary can be provided to the JSON reader
+  - Performance scenarios & benchmarking
+*/
diff --git a/tags/jsoncpp/0.6.0-rc1/doxybuild.py b/tags/jsoncpp/0.6.0-rc1/doxybuild.py
new file mode 100644
index 0000000..03ad68d
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doxybuild.py
@@ -0,0 +1,169 @@
+"""Script to generate doxygen documentation.
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/config.h b/tags/jsoncpp/0.6.0-rc1/include/json/config.h new file mode 100644 index 0000000..24991d5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGATED + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/features.h b/tags/jsoncpp/0.6.0-rc1/include/json/features.h new file mode 100644 index 0000000..0b53db1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h new file mode 100644 index 0000000..083d44f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/json.h b/tags/jsoncpp/0.6.0-rc1/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/reader.h b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h new file mode 100644 index 0000000..5e4c32a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h @@ -0,0 +1,214 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/value.h b/tags/jsoncpp/0.6.0-rc1/include/json/value.h new file mode 100644 index 0000000..66821ab --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
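The overview above mentions operator[] creating missing members, get() supplying a default, and getMemberNames() for iterating an object. A small sketch of how those calls combine; the member names and values are invented for illustration, and include/ is assumed to be on the include path:

    #include <iostream>
    #include <json/json.h>

    int main()
    {
       Json::Value cfg( Json::objectValue );

       // operator[] on a non-const Value creates missing members (as null) before assignment.
       cfg["encoding"] = "UTF-8";
       cfg["plug-ins"][0u] = "python";
       cfg["plug-ins"][1u] = "ruby";

       // get() returns the supplied default when the member does not exist.
       int indentLength = cfg.get( "indent-length", 3 ).asInt();

       // getMemberNames() lists the keys of an objectValue.
       Json::Value::Members names = cfg.getMemberNames();
       for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
          std::cout << *it << "\n";

       std::cout << "indent-length = " << indentLength << std::endl;
       return 0;
    }
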
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
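 /// A minimal sketch of the syntax above (the ".settings.size[0]" layout is purely
 /// illustrative, and the class is experimental as noted):
 /// \code
 /// Json::Path path( ".settings.size[0]" );
 /// Json::Value size = path.resolve( root, Json::Value( 4 ) ); // 4 if the node is absent
 /// path.make( root ) = 8;                                     // creates intermediate nodes as needed
 /// \endcode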
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
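 // Lookup sketch for the scheme described above (illustrative only): with
 // itemsPerPage == 8, item 19 lives at pages_[19 / 8][19 % 8], i.e. pages_[2][3];
 // because 8 is a power of 2, the divide and modulo reduce to a shift and a mask.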
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/writer.h b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h new file mode 100644 index 0000000..cb0bd9b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
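 *
 * A minimal usage sketch (the produced string follows the rules listed below):
 * \code
 * Json::StyledWriter writer;
 * std::string document = writer.write( root );   // 'root' is any Json::Value
 * \endcode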
+ * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
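 * A minimal usage sketch (assumes <iostream> has been included for std::cout):
 * \code
 * Json::StyledStreamWriter writer( "  " );   // two-space indentation instead of the default tab
 * writer.write( std::cout, root );           // 'root' is any Json::Value
 * \endcode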
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc1/makerelease.py b/tags/jsoncpp/0.6.0-rc1/makerelease.py new file mode 100644 index 0000000..a6e330e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makerelease.py @@ -0,0 +1,380 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir + print 'Generating amalgated source tarball to', amalgated_tarball_path + amalgated_dir = 'dist/amalgated' + amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) + amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version + tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], + amalgated_dir, prefix_dir=amalgated_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + 
all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
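+ // Note on reallocateArrayPageIndex() above: (indexCount*3)/2 + 1 grows the
+ // page index by roughly 1.5x per call (e.g. 0 -> 1 -> 2 -> 4 -> 7 -> 11),
+ // unless minNewIndexCount forces a larger jump.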
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
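+ * (The batch-allocator variant of DefaultValueMapAllocator::allocateMapLink()
+ * below relies on this: it memset()s freshly allocated storage to zero instead
+ * of running this constructor.)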
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..7c594e2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp @@ -0,0 +1,880 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
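+ // The entire stream is read into a single string here: std::getline with
+ // (char)EOF as the delimiter normally consumes everything up to end-of-stream,
+ // and the string overload of parse() is then reused.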
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); 
+ } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
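+ // For example, if maxIntegerValue were 275, threshold would be 27 and
+ // lastDigitThreshold 5: once value reaches 27, a digit that is not the
+ // last one, or a last digit above 5, hands the token to decodeDouble().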
+ if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; + } + if ( isNegative ) + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize+1]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location &current, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location &current, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' &&
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... 
+ * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast<char>(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast<char>(0x80 | (0x3f & cp)); + result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast<char>(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast<char>(0x80 | (0x3f & cp)); + result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specifies the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned integer to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( LargestUInt value, + char *&current ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp new file mode 100644 index 0000000..c810417 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp @@ -0,0 +1,1847 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction.
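// ----------------------------------------------------------------------------
// Illustration (a sketch, not part of the imported file): the json_tool.h
// helpers above are internal, and their contracts are easy to miss.
// uintToString() expects `current` to point one past the end of the buffer and
// writes the digits backwards; codePointToUTF8() returns the raw UTF-8 bytes.
// The function name below is hypothetical:
#include <cassert>
#include <cstring>
static void jsonToolSketch()
{
   Json::UIntToStringBuffer buffer;
   char *current = buffer + sizeof(buffer);
   Json::uintToString( 1234, current );             // moves current back onto '1'
   assert( std::strcmp( current, "1234" ) == 0 );

   // U+20AC (euro sign) takes the three-byte branch: 0xE2 0x82 0xAC.
   assert( Json::codePointToUTF8( 0x20AC ) == "\xE2\x82\xAC" );
}
// ----------------------------------------------------------------------------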
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + + +/// Unknown size marker +enum { unknown = (unsigned)-1 }; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + +} // namespace Json + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
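// ----------------------------------------------------------------------------
// Illustration (a sketch, not part of the imported file): the limits above are
// derived from unsigned arithmetic only. With the default typedefs (Int == int,
// UInt == unsigned int), UInt(-1) is 0xFFFFFFFF, so UInt(-1)/2 gives maxInt ==
// 0x7FFFFFFF and ~(UInt(-1)/2) gives minInt == 0x80000000, i.e. INT_MAX and
// INT_MIN. A hypothetical sanity check under that assumption:
#include <json/value.h>
#include <cassert>
#include <climits>
static void integerLimitSketch()
{
   assert( Json::Value::maxInt == INT_MAX );
   assert( Json::Value::minInt == INT_MIN );
   assert( Json::Value::maxUInt == UINT_MAX );
}
// ----------------------------------------------------------------------------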
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGATED) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return static_cast<double>( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast<double>( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast<double>( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast<float>( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast<float>( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast<float>( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast<float>( value_.real_ ); + case booleanValue: + return value_.bool_ ?
1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex 
newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
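// ----------------------------------------------------------------------------
// Illustration (a sketch, not part of the imported file): the Path helper being
// defined here walks a value tree using a small path grammar: '.' separates
// object keys, "[n]" indexes arrays, and '%' / "[%]" consume the extra
// PathArgument parameters in order. Assuming the default arguments declared
// for Path's constructor in json_value.h, hypothetical usage looks like:
#include <json/value.h>
#include <string>
static std::string thirdAuthor( const Json::Value &root )
{
   Json::Path path( ".library.books[%].author", Json::PathArgument( 2u ) );
   return path.resolve( root, "unknown" ).asString();
}
// ----------------------------------------------------------------------------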
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator &current ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if (
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator &current ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator &current ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..8c4c180 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
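// ----------------------------------------------------------------------------
// Illustration (a sketch, not part of the imported file): the iterators
// implemented above back Value::begin()/end(). Assuming the public iterator
// interface declared in json_value.h, a hypothetical dump of an object's
// members:
#include <json/value.h>
#include <ostream>
static void printMembers( const Json::Value &root, std::ostream &out )
{
   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
   {
      // memberName() is meaningful for objects; for arrays it is empty and
      // key() returns the index instead.
      out << it.memberName() << " = " << (*it).toStyledString();
   }
}
// ----------------------------------------------------------------------------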
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asLargestInt() ); + break; + case uintValue: + document_ += valueToString( value.asLargestUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + 
lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
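// Usage sketch for the two string-producing writers defined above; illustrative
// only, not part of this patch. FastWriter emits a compact single line,
// StyledWriter an indented, human-readable document that keeps the comments
// attached to the Value:
//
//   Json::Value root;
//   root["name"] = "json";
//   Json::FastWriter fast;
//   std::string compact = fast.write( root );   // {"name":"json"}\n
//   Json::StyledWriter styled;
//   std::string pretty = styled.write( root );  // multi-line, 3-space indent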
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
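// Note on the stream API completed at the end of json_writer.cpp above: the
// operator<<( std::ostream &, const Value & ) overload delegates to
// StyledStreamWriter, so a Value can be written straight to any std::ostream.
// Illustrative sketch, not part of this patch (the output file name is hypothetical):
//
//   Json::Value root;
//   root["ok"] = true;
//   std::cout << root;                       // styled output via StyledStreamWriter
//
//   std::ofstream out( "out.json" );
//   Json::StyledStreamWriter writer( "  " ); // two-space indentation
//   writer.write( out, root );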
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..0d07238 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h @@ -0,0 +1,259 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). 
+ PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. + TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. 
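      /// A typical main() just registers its fixtures and forwards argc/argv;
      /// sketch only (see src/test_lib_json/main.cpp in this patch for the real one):
      ///   JsonTest::Runner runner;
      ///   JSONTEST_REGISTER_FIXTURE( runner, MyFixture, myTest );  // hypothetical fixture
      ///   return runner.runCommandLine( argc, argv );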
+ int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. 
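/// A fixture test case is defined with this macro and registered with a Runner,
/// e.g. (illustrative sketch, MyFixture is hypothetical):
///   struct MyFixture : JsonTest::TestCase { };
///   JSONTEST_FIXTURE( MyFixture, defaultValueIsNull )
///   {
///      Json::Value value;
///      JSONTEST_ASSERT( value.isNull() ) << "default-constructed Value should be null";
///   }
///   // in main(): JSONTEST_REGISTER_FIXTURE( runner, MyFixture, defaultValueIsNull );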
+#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp new file mode 100644 index 0000000..de64200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp @@ -0,0 +1,271 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , float_( 0.00390625f ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. 
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
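# Typical invocation from the top-level directory (sketch only; the accepted
# platform values are defined in the SConstruct and linux-gcc is just an example):
#   scons platform=linux-gcc         # builds the libraries and test binaries
#   scons platform=linux-gcc check   # runs this unit test suite ('libs' must be built, see above)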
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/test/cleantests.py b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 
+.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json new 
file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 
+.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 
+.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 
+.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 
+.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 
+.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 
+.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 
+.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 
+.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 
+.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 
+.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json new file mode 100644 
index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ 
+.="¢" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json 
b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff 
--git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": 
false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '<File "%s" is missing: %s>' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) +
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] <path to jsontestrunner.exe> [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provide at least the path to the jsontestrunner executable.'
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/test/rununittests.py b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provide the path to the test_lib_json executable.'
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/version b/tags/jsoncpp/0.6.0-rc1/version new file mode 100644 index 0000000..8d1042e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/version @@ -0,0 +1 @@ +0.6.0-rc1 \ No newline at end of file From 0a536667ff54ce5da84e8b5fd606d66e216b335f Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 06:30:29 +0000 Subject: [PATCH 175/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@175 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc1/AUTHORS | 1 - tags/jsoncpp/0.6.0-rc1/LICENSE | 55 - tags/jsoncpp/0.6.0-rc1/NEWS.txt | 95 - tags/jsoncpp/0.6.0-rc1/README.txt | 172 -- tags/jsoncpp/0.6.0-rc1/SConstruct | 248 -- tags/jsoncpp/0.6.0-rc1/devtools/__init__.py | 1 - tags/jsoncpp/0.6.0-rc1/devtools/antglob.py | 201 -- tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py | 63 - .../0.6.0-rc1/devtools/licenseupdater.py | 93 - tags/jsoncpp/0.6.0-rc1/devtools/tarball.py | 53 - tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.6.0-rc1/doc/footer.html | 23 - tags/jsoncpp/0.6.0-rc1/doc/header.html | 24 - tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox | 126 - tags/jsoncpp/0.6.0-rc1/doc/readme.txt | 1 - tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox | 35 - tags/jsoncpp/0.6.0-rc1/doxybuild.py | 169 -- .../jsoncpp/0.6.0-rc1/include/json/autolink.h | 24 - tags/jsoncpp/0.6.0-rc1/include/json/config.h | 96 - .../jsoncpp/0.6.0-rc1/include/json/features.h | 49 - .../jsoncpp/0.6.0-rc1/include/json/forwards.h | 44 - tags/jsoncpp/0.6.0-rc1/include/json/json.h | 15 - tags/jsoncpp/0.6.0-rc1/include/json/reader.h | 214 -- tags/jsoncpp/0.6.0-rc1/include/json/value.h | 1103 --------- tags/jsoncpp/0.6.0-rc1/include/json/writer.h | 185 -- .../0.6.0-rc1/makefiles/vs71/jsoncpp.sln | 46 - .../0.6.0-rc1/makefiles/vs71/jsontest.vcproj | 119 - .../0.6.0-rc1/makefiles/vs71/lib_json.vcproj | 214 -- .../makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.6.0-rc1/makerelease.py | 380 --- .../jsoncpp/0.6.0-rc1/scons-tools/globtool.py | 53 - tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py | 179 -- .../0.6.0-rc1/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py | 82 - .../0.6.0-rc1/src/jsontestrunner/main.cpp | 269 --- .../0.6.0-rc1/src/jsontestrunner/sconscript | 9 - .../src/lib_json/json_batchallocator.h | 130 - .../src/lib_json/json_internalarray.inl | 456 ---- .../src/lib_json/json_internalmap.inl | 615 ----- .../0.6.0-rc1/src/lib_json/json_reader.cpp | 880 ------- .../0.6.0-rc1/src/lib_json/json_tool.h | 93 - .../0.6.0-rc1/src/lib_json/json_value.cpp | 1847 -------------- .../src/lib_json/json_valueiterator.inl | 299 --- .../0.6.0-rc1/src/lib_json/json_writer.cpp | 838 ------- .../jsoncpp/0.6.0-rc1/src/lib_json/sconscript | 8 - .../0.6.0-rc1/src/test_lib_json/jsontest.cpp | 608 ----- .../0.6.0-rc1/src/test_lib_json/jsontest.h | 259 -- .../0.6.0-rc1/src/test_lib_json/main.cpp | 271 --- .../0.6.0-rc1/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.6.0-rc1/test/cleantests.py | 10 - .../test/data/fail_test_array_01.json | 1 - .../test/data/test_array_01.expected | 1 - .../0.6.0-rc1/test/data/test_array_01.json | 1 - .../test/data/test_array_02.expected | 2 - .../0.6.0-rc1/test/data/test_array_02.json | 1 - .../test/data/test_array_03.expected | 6 - .../0.6.0-rc1/test/data/test_array_03.json | 1 - .../test/data/test_array_04.expected | 5 - .../0.6.0-rc1/test/data/test_array_04.json | 1 
- .../test/data/test_array_05.expected | 100 - .../0.6.0-rc1/test/data/test_array_05.json | 1 - .../test/data/test_array_06.expected | 5 - .../0.6.0-rc1/test/data/test_array_06.json | 4 - .../test/data/test_basic_01.expected | 1 - .../0.6.0-rc1/test/data/test_basic_01.json | 1 - .../test/data/test_basic_02.expected | 1 - .../0.6.0-rc1/test/data/test_basic_02.json | 1 - .../test/data/test_basic_03.expected | 3 - .../0.6.0-rc1/test/data/test_basic_03.json | 3 - .../test/data/test_basic_04.expected | 2 - .../0.6.0-rc1/test/data/test_basic_04.json | 2 - .../test/data/test_basic_05.expected | 2 - .../0.6.0-rc1/test/data/test_basic_05.json | 2 - .../test/data/test_basic_06.expected | 2 - .../0.6.0-rc1/test/data/test_basic_06.json | 2 - .../test/data/test_basic_07.expected | 2 - .../0.6.0-rc1/test/data/test_basic_07.json | 2 - .../test/data/test_basic_08.expected | 2 - .../0.6.0-rc1/test/data/test_basic_08.json | 3 - .../test/data/test_basic_09.expected | 2 - .../0.6.0-rc1/test/data/test_basic_09.json | 4 - .../test/data/test_comment_01.expected | 8 - .../0.6.0-rc1/test/data/test_comment_01.json | 8 - .../test/data/test_complex_01.expected | 20 - .../0.6.0-rc1/test/data/test_complex_01.json | 17 - .../test/data/test_integer_01.expected | 1 - .../0.6.0-rc1/test/data/test_integer_01.json | 2 - .../test/data/test_integer_02.expected | 1 - .../0.6.0-rc1/test/data/test_integer_02.json | 2 - .../test/data/test_integer_03.expected | 1 - .../0.6.0-rc1/test/data/test_integer_03.json | 2 - .../test/data/test_integer_04.expected | 2 - .../0.6.0-rc1/test/data/test_integer_04.json | 3 - .../test/data/test_integer_05.expected | 2 - .../0.6.0-rc1/test/data/test_integer_05.json | 2 - .../test/data/test_integer_06_64bits.expected | 1 - .../test/data/test_integer_06_64bits.json | 2 - .../test/data/test_integer_07_64bits.expected | 1 - .../test/data/test_integer_07_64bits.json | 2 - .../test/data/test_integer_08_64bits.expected | 1 - .../test/data/test_integer_08_64bits.json | 2 - .../test/data/test_large_01.expected | 2122 ----------------- .../0.6.0-rc1/test/data/test_large_01.json | 2 - .../test/data/test_object_01.expected | 1 - .../0.6.0-rc1/test/data/test_object_01.json | 1 - .../test/data/test_object_02.expected | 2 - .../0.6.0-rc1/test/data/test_object_02.json | 1 - .../test/data/test_object_03.expected | 4 - .../0.6.0-rc1/test/data/test_object_03.json | 5 - .../test/data/test_object_04.expected | 2 - .../0.6.0-rc1/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.6.0-rc1/test/data/test_real_01.expected | 2 - .../0.6.0-rc1/test/data/test_real_01.json | 3 - .../0.6.0-rc1/test/data/test_real_02.expected | 2 - .../0.6.0-rc1/test/data/test_real_02.json | 3 - .../0.6.0-rc1/test/data/test_real_03.expected | 2 - .../0.6.0-rc1/test/data/test_real_03.json | 3 - .../0.6.0-rc1/test/data/test_real_04.expected | 2 - .../0.6.0-rc1/test/data/test_real_04.json | 3 - .../0.6.0-rc1/test/data/test_real_05.expected | 3 - .../0.6.0-rc1/test/data/test_real_05.json | 3 - .../0.6.0-rc1/test/data/test_real_06.expected | 3 - .../0.6.0-rc1/test/data/test_real_06.json | 3 - .../0.6.0-rc1/test/data/test_real_07.expected | 3 - .../0.6.0-rc1/test/data/test_real_07.json | 3 - .../test/data/test_string_01.expected | 1 - .../0.6.0-rc1/test/data/test_string_01.json | 1 - .../test/data/test_string_02.expected | 1 - .../0.6.0-rc1/test/data/test_string_02.json | 1 - .../test/data/test_string_03.expected | 1 - .../0.6.0-rc1/test/data/test_string_03.json | 
1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - .../0.6.0-rc1/test/generate_expected.py | 11 - .../0.6.0-rc1/test/jsonchecker/fail1.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail10.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail11.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail12.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail13.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail14.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail15.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail16.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail17.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail18.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail19.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail2.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail20.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail21.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail22.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail23.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail24.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail25.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail26.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail27.json | 2 - .../0.6.0-rc1/test/jsonchecker/fail28.json | 2 - .../0.6.0-rc1/test/jsonchecker/fail29.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail3.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail30.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail31.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail32.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail33.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail4.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail5.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail6.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail7.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail8.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail9.json | 1 - .../0.6.0-rc1/test/jsonchecker/pass1.json | 58 - .../0.6.0-rc1/test/jsonchecker/pass2.json | 1 - .../0.6.0-rc1/test/jsonchecker/pass3.json | 6 - .../0.6.0-rc1/test/jsonchecker/readme.txt | 3 - .../0.6.0-rc1/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.6.0-rc1/test/runjsontests.py | 134 -- tags/jsoncpp/0.6.0-rc1/test/rununittests.py | 73 - tags/jsoncpp/0.6.0-rc1/version | 1 - 185 files changed, 15355 deletions(-) delete mode 100644 tags/jsoncpp/0.6.0-rc1/AUTHORS delete mode 100644 tags/jsoncpp/0.6.0-rc1/LICENSE delete mode 100644 tags/jsoncpp/0.6.0-rc1/NEWS.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/README.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/SConstruct delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/footer.html delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/header.html delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/readme.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.6.0-rc1/doxybuild.py 
delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/config.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/features.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/json.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/reader.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/value.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/writer.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc1/makerelease.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/cleantests.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json delete mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected delete mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json delete mode 100644 
tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/rununittests.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/version diff --git a/tags/jsoncpp/0.6.0-rc1/AUTHORS b/tags/jsoncpp/0.6.0-rc1/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.6.0-rc1/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc1/LICENSE b/tags/jsoncpp/0.6.0-rc1/LICENSE deleted file mode 100644 index ca2bfe1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -The JsonCpp library's source code, including accompanying documentation, -tests and demonstration applications, are licensed under the following -conditions... - -The author (Baptiste Lepilleur) explicitly disclaims copyright in all -jurisdictions which recognize such a disclaimer. In such jurisdictions, -this software is released into the Public Domain. - -In jurisdictions which do not recognize Public Domain property (e.g. Germany as of -2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is -released under the terms of the MIT License (see below). - -In jurisdictions which recognize Public Domain property, the user of this -software may choose to accept it either as 1) Public Domain, 2) under the -conditions of the MIT License (see below), or 3) under the terms of dual -Public Domain/MIT License conditions described here, as they choose. 
- -The MIT License is about as close to Public Domain as a license can get, and is -described in clear, concise terms at: - - http://en.wikipedia.org/wiki/MIT_License - -The full text of the MIT License follows: - -======================================================================== -Copyright (c) 2007-2010 Baptiste Lepilleur - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -======================================================================== -(END LICENSE TEXT) - -The MIT license is compatible with both the GPL and commercial -software, affording one all of the rights of Public Domain with the -minor nuisance of being required to keep the above copyright notice -and license text in the source code. Note also that by accepting the -Public Domain "license" you can re-license your copy using whatever -license you like. diff --git a/tags/jsoncpp/0.6.0-rc1/NEWS.txt b/tags/jsoncpp/0.6.0-rc1/NEWS.txt deleted file mode 100644 index 7978c0a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/NEWS.txt +++ /dev/null @@ -1,95 +0,0 @@ - New in JsonCpp 0.6.0: - --------------------- - -* Compilation - - - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now - propagated to the build environment as this is required for some - compiler installation. - - - Added support for Microsoft Visual Studio 2008 (bug #2930462): - The platform "msvc90" has been added. - - Notes: you need to setup the environment by running vcvars32.bat - (e.g. MSVC 2008 command prompt in start menu) before running scons. - - - Added support for amalgated source and header generation (a la sqlite). - Refer to README.txt section "Generating amalgated source and header" - for detail. - -* Value - - - Removed experimental ValueAllocator, it caused static - initialization/destruction order issues (bug #2934500). - The DefaultValueAllocator has been inlined in code. - - - Added support for 64 bits integer: - - Types Json::Int64 and Json::UInt64 have been added. They are aliased - to 64 bits integers on system that support them (based on __int64 on - Microsoft Visual Studio platform, and long long on other platforms). - - Types Json::LargestInt and Json::LargestUInt have been added. They are - aliased to the largest integer type supported: - either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. - - Json::Value::asInt() and Json::Value::asUInt() still returns plain - "int" based types, but asserts if an attempt is made to retrieve - a 64 bits value that can not represented as the return type. 
- - Json::Value::asInt64() and Json::Value::asUInt64() have been added - to obtain the 64 bits integer value. - - Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns - the integer as a LargestInt/LargestUInt respectively. Those functions - functions are typically used when implementing writer. - - The reader attempts to read number as 64 bits integer, and fall back - to reading a double if the number is not in the range of 64 bits - integer. - - Warning: Json::Value::asInt() and Json::Value::asUInt() now returns - long long. This changes break code that was passing the return value - to *printf() function. - - Support for 64 bits integer can be disabled by defining the macro - JSON_NO_INT64 (uncomment it in json/config.h for example), though - it should have no impact on existing usage. - - - The type Json::ArrayIndex is used for indexes of a JSON value array. It - is an unsigned int (typically 32 bits). - - - Array index can be passed as int to operator[], allowing use of literal: - Json::Value array; - array.append( 1234 ); - int value = array[0].asInt(); // did not compile previously - - - Added float Json::Value::asFloat() to obtain a floating point value as a - float (avoid lost of precision warning caused by used of asDouble() - to initialize a float). - -* Reader - - - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. - Bug #3023708 (Formatted has 2 't'). The old member function is deprecated - but still present for backward compatibility. - -* Tests - - - Added test to ensure that the escape sequence "\/" is corrected handled - by the parser. - -* Bug fixes - - - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now - correctly detected. - - - Bug #3139678: stack buffer overflow when parsing a double with a - length of 32 characters. - -* License - - - See file LICENSE for details. Basically JsonCpp is now licensed under - MIT license, or public domain if desired and recognized in your jurisdiction. - Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who - helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/0.6.0-rc1/README.txt b/tags/jsoncpp/0.6.0-rc1/README.txt deleted file mode 100644 index ba70329..0000000 --- a/tags/jsoncpp/0.6.0-rc1/README.txt +++ /dev/null @@ -1,172 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate -JSON value, handle serialization and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. 
- -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - msvc90 Microsoft Visual Studio 2008 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -Notes: if you are building with Microsoft Visual Studio 2008, you need to -setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) -before running scons. - -Adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -Notes that test can be run by scons using the 'check' target (see above). - -You need to run test manually only if you are troubleshooting an issue. - -In the instruction below, replace "path to jsontest.exe" with the path -of the 'jsontest' executable that was compiled on your platform. - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - -Notes that the documentation is also available for download as a tarball. -The documentation of the latest release is available online at: -http://jsoncpp.sourceforge.net/ - -* Generating amalgated source and header - ====================================== - -JsonCpp is provided with a script to generate a single header and a single -source file to ease inclusion in an existing project. - -The amalgated source can be generated at any time by running the following -command from the top-directory (requires python 2.6): - -python amalgate.py - -It is possible to specify header name. See -h options for detail. By default, -the following files are generated: -- dist/jsoncpp.cpp: source file that need to be added to your project -- dist/json/json.h: header file corresponding to use in your project. It is -equivalent to including json/json.h in non-amalgated source. This header -only depends on standard headers. -- dist/json/json-forwards.h: header the provides forward declaration -of all JsonCpp types. This typically what should be included in headers to -speed-up compilation. - -The amalgated sources are generated by concatenating JsonCpp source in the -correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of -other headers. - -* Using json-cpp in your project: - =============================== - -include/ should be added to your compiler include path. 
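
As an aside to the "Running the test manually" instructions above: the test runners are ordinary Python modules, so they can also be driven from a small script. The sketch below is hedged -- it assumes the runAllTests() entry points that the SConstruct 'check' target (shown later in this patch) relies on, that a non-zero return value indicates failure as that action's usage suggests, and the executable paths are placeholders for whatever your build produced.

    import os.path
    import sys

    # Run from the jsoncpp top directory; make test/runjsontests.py and
    # test/rununittests.py importable.
    test_dir = os.path.abspath( 'test' )
    sys.path.insert( 0, test_dir )
    import runjsontests
    import rununittests

    # Placeholder paths: substitute the executables built for your platform.
    jsontest_exe = os.path.abspath( 'bin/linux-gcc-4.4/jsontest' )
    unittest_exe = os.path.abspath( 'bin/linux-gcc-4.4/test_lib_json' )

    # Reader/Writer tests take the test data directory (test/data), as in the
    # SConstruct 'check' action; the unit tests only need the executable.
    status = runjsontests.runAllTests( jsontest_exe, os.path.join( test_dir, 'data' ) )
    status = rununittests.runAllTests( unittest_exe ) or status
    sys.exit( status )
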
jsoncpp headers -should be included as follow: - -#include - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. - -* License - ======= - -See file LICENSE for details. Basically JsonCpp is licensed under -MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/0.6.0-rc1/SConstruct b/tags/jsoncpp/0.6.0-rc1/SConstruct deleted file mode 100644 index 23225cb..0000000 --- a/tags/jsoncpp/0.6.0-rc1/SConstruct +++ /dev/null @@ -1,248 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. 
- import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - # LD_LIBRARY_PATH & co is required on some system for the compiler - vars = {} - for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'msvc90': - env['MSVS_VERSION']='9.0' - # Scons 1.2 fails to detect the correct location of the platform SDK. - # So we propagate those from the environment. This requires that the - # user run vcvars32.bat before compiling. 
- if 'INCLUDE' in os.environ: - env['ENV']['INCLUDE'] = os.environ['INCLUDE'] - if 'LIB' in os.environ: - env['ENV']['LIB'] = os.environ['LIB'] - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - 
target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. 
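# Aside on the SConstruct above: buildProjectInDirectory() hands each
# per-directory sconscript the exported helpers (env, env_testing,
# buildLibrary, buildJSONTests, buildUnitTests). The sconscript files
# themselves are not part of this hunk, so the following is only a
# hypothetical sketch of how such a sconscript would consume buildLibrary();
# the source list is illustrative.
Import( 'env buildLibrary' )
buildLibrary( env,
              Split( 'json_reader.cpp json_value.cpp json_writer.cpp' ),
              'json' )
# With platform=msvc71 the resulting static library is named
# json_vc71_libmt (LIB_NAME_SUFFIX expands to vc71_libmt), matching the
# autolink naming convention referenced by the SConstruct comment and
# json/autolink.h.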
- Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript 
**/wscript_build', -## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py deleted file mode 100644 index 03e0467..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Updates the license text in source file. -""" - -# An existing license is found if the file starts with the string below, -# and ends with the first blank line. -LICENSE_BEGIN = "// Copyright " - -BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -""".replace('\r\n','\n') - -def update_license( path, dry_run, show_diff ): - """Update the license statement in the specified file. - Parameters: - path: path of the C++ source file to update. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - with open( path, 'rt' ) as fin: - original_text = fin.read().replace('\r\n','\n') - newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith( LICENSE_BEGIN ): - # No existing license found => prepend it - new_text = BRIEF_LICENSE + original_text - else: - license_end_index = original_text.index( '\n\n' ) # search first blank line - new_text = BRIEF_LICENSE + original_text[license_end_index+2:] - if original_text != new_text: - if not dry_run: - with open( path, 'wb' ) as fout: - fout.write( new_text.replace('\n', newline ) ) - print 'Updated', path - if show_diff: - import difflib - print '\n'.join( difflib.unified_diff( original_text.split('\n'), - new_text.split('\n') ) ) - return True - return False - -def update_license_in_source_directories( source_dirs, dry_run, show_diff ): - """Updates license text in C++ source files found in directory source_dirs. - Parameters: - source_dirs: list of directory to scan for C++ sources. Directories are - scanned recursively. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - from devtools import antglob - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - for source_dir in source_dirs: - cpp_sources = antglob.glob( source_dir, - includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs ) - for source in cpp_sources: - update_license( source, dry_run, show_diff ) - -def main(): - usage = """%prog DIR [DIR2...] 
-Updates license text in sources of the project in source files found -in the directory specified on the command-line. - -Example of call: -python devtools\licenseupdater.py include src -n --diff -=> Show change that would be made to the sources. - -python devtools\licenseupdater.py include src -=> Update license statement on all sources in directories include/ and src/. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, - help="""Only show what files are updated, do not update the files""") - parser.add_option('--diff', dest="show_diff", action='store_true', default=False, - help="""On update, show change made to the file.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - update_license_in_source_directories( args, options.dry_run, options.show_diff ) - print 'Done' - -if __name__ == '__main__': - import sys - import os.path - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - main() - diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! 
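# Example usage of the helpers in this file (illustrative only: the paths and
# the version string are placeholders, not taken from the real release
# scripts, and these calls are not part of the body of decompress() below):
#
#   make_tarball( 'dist/jsoncpp-src-0.6.0.tar.gz',
#                 sources = ['include', 'src', 'doc', 'README.txt'],
#                 base_dir = '.',
#                 prefix_dir = 'jsoncpp-src-0.6.0' )
#   decompress( 'dist/jsoncpp-src-0.6.0.tar.gz', 'unpacked' )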
- tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. 
- -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. 
If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. 
The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. 
This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. 
- -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. 
The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). 
- -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain images that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command <filter> <input-file>, where <filter> -# is the value of the INPUT_FILTER tag, and <input-file> is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. 
- -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. 
- -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. 
- -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. 
Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
- -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. 
- -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. 
- -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). 
- -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. 
- -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. 
Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc1/doc/footer.html b/tags/jsoncpp/0.6.0-rc1/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.6.0-rc1/doc/header.html b/tags/jsoncpp/0.6.0-rc1/doc/header.html deleted file mode 100644 index 1a6ad61..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
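The doxyfile removed above is a template rather than a ready-to-run configuration: the %UPPERCASE% markers (%WARNING_LOG_PATH%, %HTML_OUTPUT%, %HTML_HELP%, %HAVE_DOT%, %DOT_PATH%, %UML_LOOK%) are placeholders that the doxybuild.py script, deleted further down in this patch, substitutes before invoking doxygen (see its do_subst_in_file() function). The snippet below is only a minimal sketch of that substitution step, not part of the patch; the helper name subst_placeholders and the example values are made up for illustration.

# Sketch only (not part of this patch): how the %...% placeholders in the
# doxyfile template above get filled in. The real logic is do_subst_in_file()
# in doxybuild.py, shown later in this patch; the values below are examples.
import re

def subst_placeholders(text, values):
    # Replace every placeholder key (e.g. '%HTML_OUTPUT%') with its value.
    for key, value in values.items():
        # Backslashes are doubled so re.sub() keeps Windows-style paths intact.
        text = re.sub(re.escape(key), value.replace('\\', '\\\\'), text)
    return text

template = "HTML_OUTPUT = %HTML_OUTPUT%\nWARN_LOGFILE = %WARNING_LOG_PATH%\nHAVE_DOT = %HAVE_DOT%\n"
print(subst_placeholders(template, {
    '%HTML_OUTPUT%': '../dist/doxygen/jsoncpp-api-html-0.6.0',    # example path
    '%WARNING_LOG_PATH%': '../dist/jsoncpp-doxygen-warning.log',  # example path
    '%HAVE_DOT%': 'NO',
}))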
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox deleted file mode 100644 index 97cc108..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox +++ /dev/null @@ -1,126 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space": true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- attach C and C++ style comments to element during parsing -- rewrite JSON document preserving original comments - -Notes: Comments used to be supported in JSON but where removed for -portability (C like comments are not supported in Python). Since -comments are useful in configuration/input file, this feature was -preserved. - -\section _example Code example - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormattedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _pbuild Build instructions -The build instructions are located in the file -README.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _pdownload Download -The sources can be downloaded from -SourceForge download page. - -The latest version of the source is available in the project's subversion repository: - -http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ - -To checkout the source, see the following -instructions. - -\section _news What's New? 
-The description of latest changes can be found in -NEWS.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest NEWS.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -See file LICENSE in the top-directory of the project. - -Basically JsonCpp is licensed under MIT license, or public domain if desired -and recognized in your jurisdiction. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.6.0-rc1/doc/readme.txt b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox deleted file mode 100644 index c7f14d5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox +++ /dev/null @@ -1,35 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_writer Writter control - Provides more control to determine how specific items are serialized when JSON allow choice: - - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". - - Optionally allow escaping of "/" using "\/". - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - Stream based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.6.0-rc1/doxybuild.py b/tags/jsoncpp/0.6.0-rc1/doxybuild.py deleted file mode 100644 index 03ad68d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doxybuild.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Script to generate doxygen documentation. 
-""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'LICENSE', - 'NEWS.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h deleted file mode 100644 index 02328d1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/config.h b/tags/jsoncpp/0.6.0-rc1/include/json/config.h deleted file mode 100644 index 24991d5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/config.h +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -/// If defined, indicates that the source file is amalgated -/// to prevent private header inclusion. -/// Remarks: it is automatically defined in the generated amalgated header. -// #define JSON_IS_AMALGATED - - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer -// Storages, and 64 bits integer support is disabled. -// #define JSON_NO_INT64 1 - -#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 -// Microsoft Visual Studio 6 only support conversion from __int64 to double -// (no conversion from unsigned __int64). -#define JSON_USE_INT64_DOUBLE_CONVERSION 1 -#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 - -#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 -/// Indicates that the following function is deprecated. -# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) -#endif - -#if !defined(JSONCPP_DEPRECATED) -# define JSONCPP_DEPRECATED(message) -#endif // if !defined(JSONCPP_DEPRECATED) - -namespace Json { - typedef int Int; - typedef unsigned int UInt; -# if defined(JSON_NO_INT64) - typedef int LargestInt; - typedef unsigned int LargestUInt; -# undef JSON_HAS_INT64 -# else // if defined(JSON_NO_INT64) - // For Microsoft Visual use specific types as long long is not supported -# if defined(_MSC_VER) // Microsoft Visual Studio - typedef __int64 Int64; - typedef unsigned __int64 UInt64; -# else // if defined(_MSC_VER) // Other platforms, use long long - typedef long long int Int64; - typedef unsigned long long int UInt64; -# endif // if defined(_MSC_VER) - typedef Int64 LargestInt; - typedef UInt64 LargestUInt; -# define JSON_HAS_INT64 -# endif // if defined(JSON_NO_INT64) -} // end namespace Json - - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/features.h b/tags/jsoncpp/0.6.0-rc1/include/json/features.h deleted file mode 100644 index 0b53db1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/features.h +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. 
- * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h deleted file mode 100644 index 083d44f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "config.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef unsigned int ArrayIndex; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/json.h b/tags/jsoncpp/0.6.0-rc1/include/json/json.h deleted file mode 100644 index da5fc96..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/json.h +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/reader.h b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h deleted file mode 100644 index 5e4c32a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/reader.h +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "features.h" -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. 
- * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. - * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. - \ Must be >= beginDoc. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. - * \deprecated Use getFormattedErrorMessages() instead (typo fix). - */ - JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") - std::string getFormatedErrorMessages() const; - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
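A minimal usage sketch of the parse() and getFormattedErrorMessages() API declared above (editorial illustration, not part of the original header; the document string and member name are invented for the example):

// Illustrative only: parse a UTF-8 document in strict mode and report errors.
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   Json::Features features = Json::Features::strictMode();
   Json::Reader reader( features );
   Json::Value root;
   const std::string document = "{ \"encoding\" : \"UTF-8\" }";
   if ( !reader.parse( document, root, false ) )   // collectComments is ignored in strict mode
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root["encoding"].asString() << std::endl;
   return 0;
}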
- */ - std::string getFormattedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/value.h b/tags/jsoncpp/0.6.0-rc1/include/json/value.h deleted file mode 100644 index 66821ab..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/value.h +++ /dev/null @@ -1,1103 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include <string> -# include <vector> - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include <map> -# else -# include <cpptl/smallmap.h> -# endif -# ifdef JSON_USE_CPPTL -# include <cpptl/forwards.h> -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. - */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only makes sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator<const char *> EnumMemberNames; -// typedef CppTL::AnyEnumerator<const Value &> EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignment take advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represent a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (JavaScript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * Values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non-const methods will automatically create a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resized and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtain a default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of member names of an #objectValue using - * the getMemberNames() method.
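A short illustrative sketch of the interface documented above (the member names used here are invented for the example, not taken from the library):

// Illustrative only: building and querying a Value.
Json::Value root( Json::objectValue );
root["encoding"] = "UTF-8";
root["indent"]["length"] = 3;                        // nested objects are created on demand
root["plug-ins"].append( "python" );                 // arrays grow automatically
Json::Value length = root["indent"].get( "length", 0 );
bool hasTabs = root["indent"].isMember( "use_tabs" );
Json::Value::Members members = root.getMemberNames();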
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; -# if defined(JSON_HAS_INT64) - typedef Json::UInt64 UInt64; - typedef Json::Int64 Int64; -#endif // defined(JSON_HAS_INT64) - typedef Json::LargestInt LargestInt; - typedef Json::LargestUInt LargestUInt; - typedef Json::ArrayIndex ArrayIndex; - - static const Value null; - /// Minimum signed integer value that can be stored in a Json::Value. - static const LargestInt minLargestInt; - /// Maximum signed integer value that can be stored in a Json::Value. - static const LargestInt maxLargestInt; - /// Maximum unsigned integer value that can be stored in a Json::Value. - static const LargestUInt maxLargestUInt; - - /// Minimum signed int value that can be stored in a Json::Value. - static const Int minInt; - /// Maximum signed int value that can be stored in a Json::Value. - static const Int maxInt; - /// Maximum unsigned int value that can be stored in a Json::Value. - static const UInt maxUInt; - - /// Minimum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 minInt64; - /// Maximum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 maxInt64; - /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. - static const UInt64 maxUInt64; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( ArrayIndex index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - ArrayIndex index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - ArrayIndex index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); -#if defined(JSON_HAS_INT64) - Value( Int64 value ); - Value( UInt64 value ); -#endif // if defined(JSON_HAS_INT64) - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. 
The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. - void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - Int64 asInt64() const; - UInt64 asUInt64() const; - LargestInt asLargestInt() const; - LargestUInt asLargestUInt() const; - float asFloat() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - ArrayIndex size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( ArrayIndex size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( ArrayIndex index ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( int index ); - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( ArrayIndex index ) const; - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) 
- const Value &operator[]( int index ) const; - - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( ArrayIndex index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( ArrayIndex index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. - * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... 
*/ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - LargestInt int_; - LargestUInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( ArrayIndex index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - ArrayIndex index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. 
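A sketch of how this experimental Path interface might be used, assuming a previously parsed root value and an invented document layout (editorial illustration, not part of the original header):

// Illustrative only: resolving and creating nested nodes with Json::Path.
Json::Path path( ".settings.indent[0]" );
Json::Value indentFirst = path.resolve( root, Json::Value::null );      // default if missing
Json::Value &margin = Json::Path( ".settings.margin" ).make( root );    // creates missing nodes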
- Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. - * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
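      // Editorial sketch of the page look-up described above (illustrative,
      // not part of the original file): for itemIndex = 11 with itemsPerPage = 8,
      //    pageIndex = 11 / 8 = 1;
      //    item      = pages_[1][11 % 8] = pages_[1][3];
      // i.e. the twelfth element lives in slot 3 of the second page.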
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Experimental: do not use. Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. 
- * \param indexes [input] pointer on the current index. May be \c NULL. - * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
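The iterator interface declared here is normally obtained through Value::begin() and Value::end(); a brief illustrative sketch follows (assumes root holds an objectValue and that <iostream> has been included; not part of the original header):

// Illustrative only: walking the members of an object value.
for ( Json::Value::iterator it = root.begin(); it != root.end(); ++it )
{
   std::cout << it.memberName() << " : " << *it << std::endl;
}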
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/writer.h b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h deleted file mode 100644 index cb0bd9b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/writer.h +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. 
- * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
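For reference, a minimal sketch of driving the styled writers declared in this header (editorial illustration only; root is assumed to hold a previously built Json::Value and <iostream> to be included):

// Illustrative only: human-friendly serialization.
Json::StyledWriter writer;
std::string styled = writer.write( root );           // to a string

Json::StyledStreamWriter streamWriter( "   " );      // 3-space indentation
streamWriter.write( std::cout, root );               // to a stream

std::cout << root;                                   // operator<< uses the styled stream writer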
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - -# if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); -# endif // if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( LargestInt value ); - std::string JSON_API valueToString( LargestUInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ [vcproj XML markup not preserved in this export] diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ [vcproj XML markup not preserved in this export] diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ [vcproj XML markup not preserved in this export] diff --git a/tags/jsoncpp/0.6.0-rc1/makerelease.py b/tags/jsoncpp/0.6.0-rc1/makerelease.py deleted file mode 100644 index a6e330e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makerelease.py +++ /dev/null @@ -1,380 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs.
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev - -When testing this script: -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball -import amalgate - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned' and path != 'version': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_commit( message ): - """Commit the sandbox, providing the specified comment. - """ - svn_command( 'ci', '-m', message ) - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 2: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - next_version = args[1] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - svn_commit( 'Release ' + release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir - print 'Generating amalgated source tarball to', amalgated_tarball_path - amalgated_dir = 'dist/amalgated' - amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) - amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version - tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], - amalgated_dir, prefix_dir=amalgated_source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - 
all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Source and doc release tarballs uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - - # Set next version number and commit - set_version( next_version ) - svn_commit( 'Released ' + release_version ) - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp deleted file mode 100644 index dfb6150..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -/* This executable is used for testing parser/writer using real JSON files. - */ - - -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormattedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - - -static void -printConfig() -{ - // Print the configuration used to compile JsonCpp -#if defined(JSON_NO_INT64) - printf( "JSON_NO_INT64=1\n" ); -#else - printf( "JSON_NO_INT64=0\n" ); -#endif -} - - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( std::string(argv[1]) == "--json-config" ) - { - printConfig(); - return 3; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - try - { - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - } - catch ( const std::exception &e ) - { - printf( "Unhandled exception:\n%s\n", e.what() ); - exitCode = 1; - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h deleted file mode 100644 index 173e2ed..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl deleted file mode 100644 index 3a532ad..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,456 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
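The batch allocator removed here hands out raw, uninitialised storage: allocate() returns a slot from the free list or the current page, the caller constructs the object there with placement new, and destruction followed by release() is equally the caller's responsibility. A minimal, illustrative sketch of that protocol (the include paths and the <allocated type, objects per allocation> template signature are assumptions inferred from the header, not verbatim code):

    #include <new>                      // placement new
    #include <json/value.h>             // Json::Value -- assumed include path
    #include "json_batchallocator.h"    // the header removed above

    // Illustrative only; mirrors how the internal array/map allocators below use it.
    void batchAllocatorSketch()
    {
       Json::BatchAllocator<Json::Value, 1> allocator;
       Json::Value *value = new ( allocator.allocate() ) Json::Value( 42 );  // construct in raw storage
       // ... use *value ...
       value->~Value();               // caller destroys the object explicitly
       allocator.release( value );    // storage returns to the allocator's free list
    }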
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - 
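// Note on reallocateArrayPageIndex() above (illustrative aside, not part of the
// original file): the page index grows geometrically, newIndexCount =
// (indexCount * 3) / 2 + 1, so successive capacities run 0, 1, 2, 4, 7, 11, 17, ...
// and the number of reallocations stays logarithmic in the final array size.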
virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). - } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - 
"ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl deleted file mode 100644 index f2fa160..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp deleted file mode 100644 index 7c594e2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,880 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
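What follows is the implementation behind Json::Reader. For orientation, a minimal usage sketch of the public API this file serves, mirroring the calls made by the test runner earlier in this patch (illustrative only; include paths are assumed):

    #include <cstdio>
    #include <string>
    #include <json/value.h>
    #include <json/reader.h>   // assumed public headers for this release

    bool parseSketch( const std::string &document )
    {
       Json::Reader reader( Json::Features::strictMode() );  // comments disabled, root must be array/object
       Json::Value root;
       if ( !reader.parse( document, root ) )
       {
          std::printf( "%s\n", reader.getFormattedErrorMessages().c_str() );
          return false;
       }
       return root.isArray() || root.isObject();
    }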
-#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - -Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. 
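   // (Illustrative aside, not part of the original file:) the getline() call below
   // uses EOF as the delimiter to slurp the entire stream into one string.  An
   // equivalent, delimiter-free idiom would be
   //     std::string doc( (std::istreambuf_iterator<char>( sin )),
   //                      std::istreambuf_iterator<char>() );
   // (requires <iterator>); either way the whole document ends up in a single
   // std::string before being handed to the string overload of parse().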
- std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = 
false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); 
- } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ != tokenArraySeparator && - token.type_ != tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - // Attempts to parse the number as an integer. If the number is - // larger than the maximum supported value of an integer then - // we decode the number as a double. - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) - : Value::maxLargestUInt; - Value::LargestUInt threshold = maxIntegerValue / 10; - Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); - assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); - Value::LargestUInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - Value::UInt digit(c - '0'); - if ( value >= threshold ) - { - // If the current digit is not the last one, or if it is - // greater than the last digit of the maximum integer value, - // the parse the number as a double. 
-         if ( current != token.end_ || digit > lastDigitThreshold )
-         {
-            return decodeDouble( token );
-         }
-      }
-      value = value * 10 + digit;
-   }
-   if ( isNegative )
-      currentValue() = -Value::LargestInt( value );
-   else if ( value <= Value::LargestUInt(Value::maxInt) )
-      currentValue() = Value::LargestInt( value );
-   else
-      currentValue() = value;
-   return true;
-}
-
-
-bool
-Reader::decodeDouble( Token &token )
-{
-   double value = 0;
-   const int bufferSize = 32;
-   int count;
-   int length = int(token.end_ - token.start_);
-   if ( length <= bufferSize )
-   {
-      Char buffer[bufferSize+1];
-      memcpy( buffer, token.start_, length );
-      buffer[length] = 0;
-      count = sscanf( buffer, "%lf", &value );
-   }
-   else
-   {
-      std::string buffer( token.start_, token.end_ );
-      count = sscanf( buffer.c_str(), "%lf", &value );
-   }
-
-   if ( count != 1 )
-      return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token );
-   currentValue() = value;
-   return true;
-}
-
-
-bool
-Reader::decodeString( Token &token )
-{
-   std::string decoded;
-   if ( !decodeString( token, decoded ) )
-      return false;
-   currentValue() = decoded;
-   return true;
-}
-
-
-bool
-Reader::decodeString( Token &token, std::string &decoded )
-{
-   decoded.reserve( token.end_ - token.start_ - 2 );
-   Location current = token.start_ + 1; // skip '"'
-   Location end = token.end_ - 1;       // do not include '"'
-   while ( current != end )
-   {
-      Char c = *current++;
-      if ( c == '"' )
-         break;
-      else if ( c == '\\' )
-      {
-         if ( current == end )
-            return addError( "Empty escape sequence in string", token, current );
-         Char escape = *current++;
-         switch ( escape )
-         {
-         case '"': decoded += '"'; break;
-         case '/': decoded += '/'; break;
-         case '\\': decoded += '\\'; break;
-         case 'b': decoded += '\b'; break;
-         case 'f': decoded += '\f'; break;
-         case 'n': decoded += '\n'; break;
-         case 'r': decoded += '\r'; break;
-         case 't': decoded += '\t'; break;
-         case 'u':
-            {
-               unsigned int unicode;
-               if ( !decodeUnicodeCodePoint( token, current, end, unicode ) )
-                  return false;
-               decoded += codePointToUTF8(unicode);
-            }
-            break;
-         default:
-            return addError( "Bad escape sequence in string", token, current );
-         }
-      }
-      else
-      {
-         decoded += c;
-      }
-   }
-   return true;
-}
-
-bool
-Reader::decodeUnicodeCodePoint( Token &token,
-                                Location &current,
-                                Location end,
-                                unsigned int &unicode )
-{
-
-   if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) )
-      return false;
-   if (unicode >= 0xD800 && unicode <= 0xDBFF)
-   {
-      // surrogate pairs
-      if (end - current < 6)
-         return addError( "additional six characters expected to parse unicode surrogate pair.", token, current );
-      unsigned int surrogatePair;
-      if (*(current++) == '\\' && *(current++)== 'u')
-      {
-         if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair ))
-         {
-            unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
-         }
-         else
-            return false;
-      }
-      else
-         return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current );
-   }
-   return true;
-}
-
-bool
-Reader::decodeUnicodeEscapeSequence( Token &token,
-                                     Location &current,
-                                     Location end,
-                                     unsigned int &unicode )
-{
-   if ( end - current < 4 )
-      return addError( "Bad unicode escape sequence in string: four digits expected.", token, current );
-   unicode = 0;
-   for ( int index =0; index < 4; ++index )
-   {
-      Char c = *current++;
-      unicode *= 16;
-      if ( c >= '0' && c <= '9' )
-         unicode += c - '0';
-      else if ( c >= 'a' && c <= 'f' )
-         unicode += c - 'a' + 10;
-      else if ( c >= 'A' &&
c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -// Deprecated. Preserved for backward compatibility -std::string -Reader::getFormatedErrorMessages() const -{ - return getFormattedErrorMessages(); -} - - -std::string -Reader::getFormattedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h deleted file mode 100644 index 658031b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED
-# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED
-
-/* This header provides common string manipulation support, such as UTF-8,
- * portable conversion from/to string...
- *
- * It is an internal header that must not be exposed.
- */
-
-namespace Json {
-
-/// Converts a unicode code-point to UTF-8.
-static inline std::string
-codePointToUTF8(unsigned int cp)
-{
-   std::string result;
-
-   // based on description from http://en.wikipedia.org/wiki/UTF-8
-
-   if (cp <= 0x7f)
-   {
-      result.resize(1);
-      result[0] = static_cast<char>(cp);
-   }
-   else if (cp <= 0x7FF)
-   {
-      result.resize(2);
-      result[1] = static_cast<char>(0x80 | (0x3f & cp));
-      result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
-   }
-   else if (cp <= 0xFFFF)
-   {
-      result.resize(3);
-      result[2] = static_cast<char>(0x80 | (0x3f & cp));
-      result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
-      result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
-   }
-   else if (cp <= 0x10FFFF)
-   {
-      result.resize(4);
-      result[3] = static_cast<char>(0x80 | (0x3f & cp));
-      result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
-      result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
-      result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
-   }
-
-   return result;
-}
-
-
-/// Returns true if ch is a control character (in range [0,32[).
-static inline bool
-isControlCharacter(char ch)
-{
-   return ch > 0 && ch <= 0x1F;
-}
-
-
-enum {
-   /// Constant that specify the size of the buffer that must be passed to uintToString.
-   uintToStringBufferSize = 3*sizeof(LargestUInt)+1
-};
-
-// Defines a char buffer for use with uintToString().
-typedef char UIntToStringBuffer[uintToStringBufferSize];
-
-
-/** Converts an unsigned integer to string.
- * @param value Unsigned interger to convert to string
- * @param current Input/Output string buffer.
- *        Must have at least uintToStringBufferSize chars free.
- */
-static inline void
-uintToString( LargestUInt value,
-              char *&current )
-{
-   *--current = 0;
-   do
-   {
-      *--current = char(value % 10) + '0';
-      value /= 10;
-   }
-   while ( value != 0 );
-}
-
-} // namespace Json {
-
-#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED
diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp
deleted file mode 100644
index c810417..0000000
--- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp
+++ /dev/null
@@ -1,1847 +0,0 @@
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); -const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); -const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); -const UInt64 Value::maxUInt64 = UInt64(-1); -const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); -const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); -const LargestUInt Value::maxLargestUInt = LargestUInt(-1); - - -/// Unknown size marker -enum { unknown = (unsigned)-1 }; - - -/** Duplicates the specified string value. - * @param value Pointer to the string to duplicate. Must be zero-terminated if - * length is "unknown". - * @param length Length of the value. if equals to unknown, then it will be - * computed using strlen(value). - * @return Pointer on the duplicate instance of string. - */ -static inline char * -duplicateStringValue( const char *value, - unsigned int length = unknown ) -{ - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; -} - - -/** Free the string duplicated by duplicateStringValue(). - */ -static inline void -releaseStringValue( char *value ) -{ - if ( value ) - free( value ); -} - -} // namespace Json - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#if !defined(JSON_IS_AMALGATED) -# ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -# endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - releaseStringValue( comment_ ); - JSON_ASSERT( text != 0 ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( ArrayIndex index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? duplicateStringValue(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? duplicateStringValue( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - releaseStringValue( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -ArrayIndex -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -#if defined(JSON_HAS_INT64) -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - -#endif // if defined(JSON_HAS_INT64) - - -Value::Value( Int64 value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt64 value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = 
duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( beginValue, - (unsigned int)(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const 
Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); - return Int(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); - return Int(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); - return UInt(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); - return UInt(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -# if defined(JSON_HAS_INT64) - -Value::Int64 -Value::asInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt64 -Value::asUInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1 : 0;
-   case stringValue:
-   case arrayValue:
-   case objectValue:
-      JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" );
-   default:
-      JSON_ASSERT_UNREACHABLE;
-   }
-   return 0; // unreachable;
-}
-# endif // if defined(JSON_HAS_INT64)
-
-
-LargestInt
-Value::asLargestInt() const
-{
-#if defined(JSON_NO_INT64)
-   return asInt();
-#else
-   return asInt64();
-#endif
-}
-
-
-LargestUInt
-Value::asLargestUInt() const
-{
-#if defined(JSON_NO_INT64)
-   return asUInt();
-#else
-   return asUInt64();
-#endif
-}
-
-
-double
-Value::asDouble() const
-{
-   switch ( type_ )
-   {
-   case nullValue:
-      return 0.0;
-   case intValue:
-      return static_cast<double>( value_.int_ );
-   case uintValue:
-#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
-      return static_cast<double>( value_.uint_ );
-#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
-      return static_cast<double>( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1);
-#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
-   case realValue:
-      return value_.real_;
-   case booleanValue:
-      return value_.bool_ ? 1.0 : 0.0;
-   case stringValue:
-   case arrayValue:
-   case objectValue:
-      JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" );
-   default:
-      JSON_ASSERT_UNREACHABLE;
-   }
-   return 0; // unreachable;
-}
-
-float
-Value::asFloat() const
-{
-   switch ( type_ )
-   {
-   case nullValue:
-      return 0.0f;
-   case intValue:
-      return static_cast<float>( value_.int_ );
-   case uintValue:
-#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
-      return static_cast<float>( value_.uint_ );
-#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
-      return static_cast<float>( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1);
-#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
-   case realValue:
-      return static_cast<float>( value_.real_ );
-   case booleanValue:
-      return value_.bool_ ?
1.0f : 0.0f; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0.0f; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -ArrayIndex -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return ArrayIndex( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( ArrayIndex 
newSize ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ArrayIndex oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( ArrayIndex index = newSize; index < oldSize; ++index ) - { - value_.map_->erase( index ); - } - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( ArrayIndex index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -Value & -Value::operator[]( int index ) -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -const Value & -Value::operator[]( ArrayIndex index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -const Value & -Value::operator[]( int index ) const -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( ArrayIndex index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( ArrayIndex index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( ArrayIndex index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - ArrayIndex index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + ArrayIndex(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 7457ca3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( 
isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp deleted file mode 100644 index 8c4c180..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,838 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} - - -std::string valueToString( LargestInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( LargestUInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( LargestUInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -#if defined(JSON_HAS_INT64) - -std::string valueToString( Int value ) -{ - return valueToString( LargestInt(value) ); -} - - -std::string valueToString( UInt value ) -{ - return valueToString( LargestUInt(value) ); -} - -#endif // # if defined(JSON_HAS_INT64) - - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asLargestInt() ); - break; - case uintValue: - document_ += valueToString( value.asLargestUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - 
lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
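
The three writer classes deleted in this file are the library's serialization entry points: FastWriter emits a compact single line, StyledWriter returns an indented std::string, and StyledStreamWriter writes the same styled form directly to a std::ostream. A short usage sketch (the value contents are invented for illustration):

    #include <json/json.h>
    #include <iostream>

    int main()
    {
        Json::Value root;
        root["name"] = "jsoncpp";
        root["tags"].append( "json" );
        root["tags"].append( "c++" );

        Json::FastWriter fast;              // compact: {"name":"jsoncpp","tags":["json","c++"]}
        std::cout << fast.write( root );

        Json::StyledWriter styled;          // indented form, returned as a string
        std::cout << styled.write( root );

        Json::StyledStreamWriter streamWriter( "  " );   // same style, written straight to the stream
        streamWriter.write( std::cout, root );

        std::cout << root;                  // operator<< at the end of this file uses StyledStreamWriter
        return 0;
    }
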
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp deleted file mode 100644 index 02e7b21..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,608 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h deleted file mode 100644 index 0d07238..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). 
- PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. - TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. 
- int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. -/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. 
-#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp deleted file mode 100644 index de64200..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp +++ /dev/null @@ -1,271 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value float_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , float_( 0.00390625f ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. 
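
The JSONTEST_* macros above are the whole public surface of the mini framework: JSONTEST_FIXTURE defines a test case class, JSONTEST_ASSERT / JSONTEST_ASSERT_EQUAL / JSONTEST_ASSERT_STRING_EQUAL record non-aborting failures, and JSONTEST_REGISTER_FIXTURE adds the generated factory to a Runner. A minimal sketch of a new test built on them (the WriterTest fixture and its contents are invented for illustration; main.cpp in this diff registers the real ValueTest fixtures the same way):

    #include "jsontest.h"
    #include <json/json.h>

    struct WriterTest : JsonTest::TestCase
    {
    };

    JSONTEST_FIXTURE( WriterTest, compactOutput )
    {
        Json::Value root;
        root["answer"] = 42;
        Json::FastWriter writer;
        // Non-aborting assertions: execution continues even after a failure.
        JSONTEST_ASSERT_STRING_EQUAL( "{\"answer\":42}\n", writer.write( root ) );
        JSONTEST_ASSERT( root.isObject() ) << "root should be an object";
    }

    int main( int argc, const char *argv[] )
    {
        JsonTest::Runner runner;
        JSONTEST_REGISTER_FIXTURE( runner, WriterTest, compactOutput );
        // Accepts --list-tests, --test NAME and --test-auto, as printUsage() documents.
        return runner.runCommandLine( argc, argv );
    }
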
- IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, accessArray ) -{ - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; - - const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; -} - - -JSONTEST_FIXTURE( ValueTest, asFloat ) -{ - JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; -} - -void -ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/test/cleantests.py b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json deleted file mode 100644 index 900fcc2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 
-.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 -.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected deleted file 
mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json deleted file 
mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected deleted file mode 100644 index bc9520a1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json deleted file mode 100644 index 360d660..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -9223372036854775808 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected deleted file mode 100644 index 39eb798..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json deleted file mode 100644 index 11d8513..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ --9223372036854775808 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected deleted file mode 100644 index 831f432..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json deleted file mode 100644 index 6e1fb04..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -18446744073709551615 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] 
-.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 -.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 
-.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 -.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 
-.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 -.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 
-.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 -.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 
-.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 -.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 
-.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 -.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 
-.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 -.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 
-.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 -.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 
-.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 -.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 
-.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected deleted file mode 100644 index 6ed627a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json deleted file mode 100644 index f0a220f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json +++ /dev/null @@ -1 +0,0 @@ -"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json deleted file mode 100644 index e7e1a9e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- 
a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - 
"digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' 
or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? - file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py deleted file mode 100644 index ffe8bd5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - is_json_checker_test = (input_path in test_jsonchecker) or expect_failure - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - 
process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' 
) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/test/rununittests.py b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' 
) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/version b/tags/jsoncpp/0.6.0-rc1/version deleted file mode 100644 index 8d1042e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/version +++ /dev/null @@ -1 +0,0 @@ -0.6.0-rc1 \ No newline at end of file From db6414bb0a2bc325b8b15e62de370503811aa173 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 06:31:34 +0000 Subject: [PATCH 176/268] Release 0.6.0-rc1 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@176 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc1/AUTHORS | 1 + tags/jsoncpp/0.6.0-rc1/LICENSE | 55 + tags/jsoncpp/0.6.0-rc1/NEWS.txt | 95 + tags/jsoncpp/0.6.0-rc1/README.txt | 172 ++ tags/jsoncpp/0.6.0-rc1/SConstruct | 248 ++ tags/jsoncpp/0.6.0-rc1/devtools/__init__.py | 1 + tags/jsoncpp/0.6.0-rc1/devtools/antglob.py | 201 ++ tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py | 63 + .../0.6.0-rc1/devtools/licenseupdater.py | 93 + tags/jsoncpp/0.6.0-rc1/devtools/tarball.py | 53 + tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.6.0-rc1/doc/footer.html | 23 + tags/jsoncpp/0.6.0-rc1/doc/header.html | 24 + tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox | 126 + tags/jsoncpp/0.6.0-rc1/doc/readme.txt | 1 + tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox | 35 + tags/jsoncpp/0.6.0-rc1/doxybuild.py | 169 ++ .../jsoncpp/0.6.0-rc1/include/json/autolink.h | 24 + tags/jsoncpp/0.6.0-rc1/include/json/config.h | 96 + .../jsoncpp/0.6.0-rc1/include/json/features.h | 49 + .../jsoncpp/0.6.0-rc1/include/json/forwards.h | 44 + tags/jsoncpp/0.6.0-rc1/include/json/json.h | 15 + tags/jsoncpp/0.6.0-rc1/include/json/reader.h | 214 ++ tags/jsoncpp/0.6.0-rc1/include/json/value.h | 1103 +++++++++ tags/jsoncpp/0.6.0-rc1/include/json/writer.h | 185 ++ .../0.6.0-rc1/makefiles/vs71/jsoncpp.sln | 46 + .../0.6.0-rc1/makefiles/vs71/jsontest.vcproj | 119 + .../0.6.0-rc1/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.6.0-rc1/makerelease.py | 380 +++ .../jsoncpp/0.6.0-rc1/scons-tools/globtool.py | 53 + tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py | 179 ++ .../0.6.0-rc1/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py | 82 + .../0.6.0-rc1/src/jsontestrunner/main.cpp | 269 +++ .../0.6.0-rc1/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../0.6.0-rc1/src/lib_json/json_reader.cpp | 880 +++++++ .../0.6.0-rc1/src/lib_json/json_tool.h | 93 + .../0.6.0-rc1/src/lib_json/json_value.cpp | 1847 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../0.6.0-rc1/src/lib_json/json_writer.cpp | 838 +++++++ .../jsoncpp/0.6.0-rc1/src/lib_json/sconscript | 8 + .../0.6.0-rc1/src/test_lib_json/jsontest.cpp | 608 +++++ .../0.6.0-rc1/src/test_lib_json/jsontest.h | 259 ++ .../0.6.0-rc1/src/test_lib_json/main.cpp | 271 +++ .../0.6.0-rc1/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.6.0-rc1/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../0.6.0-rc1/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../0.6.0-rc1/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../0.6.0-rc1/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + .../0.6.0-rc1/test/data/test_array_04.json | 1 + 
.../test/data/test_array_05.expected | 100 + .../0.6.0-rc1/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../0.6.0-rc1/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../0.6.0-rc1/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../0.6.0-rc1/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../0.6.0-rc1/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../0.6.0-rc1/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../0.6.0-rc1/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../0.6.0-rc1/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../0.6.0-rc1/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../0.6.0-rc1/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../0.6.0-rc1/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../0.6.0-rc1/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../0.6.0-rc1/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../0.6.0-rc1/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../0.6.0-rc1/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../0.6.0-rc1/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../0.6.0-rc1/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../0.6.0-rc1/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.6.0-rc1/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../0.6.0-rc1/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../0.6.0-rc1/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../0.6.0-rc1/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../0.6.0-rc1/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.6.0-rc1/test/data/test_real_01.expected | 2 + .../0.6.0-rc1/test/data/test_real_01.json | 3 + .../0.6.0-rc1/test/data/test_real_02.expected | 2 + .../0.6.0-rc1/test/data/test_real_02.json | 3 + .../0.6.0-rc1/test/data/test_real_03.expected | 2 + .../0.6.0-rc1/test/data/test_real_03.json | 3 + .../0.6.0-rc1/test/data/test_real_04.expected | 2 + .../0.6.0-rc1/test/data/test_real_04.json | 3 + .../0.6.0-rc1/test/data/test_real_05.expected | 3 + .../0.6.0-rc1/test/data/test_real_05.json | 3 + .../0.6.0-rc1/test/data/test_real_06.expected | 3 + .../0.6.0-rc1/test/data/test_real_06.json | 3 + .../0.6.0-rc1/test/data/test_real_07.expected | 3 + .../0.6.0-rc1/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../0.6.0-rc1/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../0.6.0-rc1/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 + .../0.6.0-rc1/test/data/test_string_03.json | 1 
+ .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../0.6.0-rc1/test/generate_expected.py | 11 + .../0.6.0-rc1/test/jsonchecker/fail1.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail10.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail11.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail12.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail13.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail14.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail15.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail16.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail17.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail18.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail19.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail2.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail20.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail21.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail22.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail23.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail24.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail25.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail26.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail27.json | 2 + .../0.6.0-rc1/test/jsonchecker/fail28.json | 2 + .../0.6.0-rc1/test/jsonchecker/fail29.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail3.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail30.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail31.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail32.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail33.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail4.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail5.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail6.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail7.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail8.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail9.json | 1 + .../0.6.0-rc1/test/jsonchecker/pass1.json | 58 + .../0.6.0-rc1/test/jsonchecker/pass2.json | 1 + .../0.6.0-rc1/test/jsonchecker/pass3.json | 6 + .../0.6.0-rc1/test/jsonchecker/readme.txt | 3 + .../0.6.0-rc1/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.6.0-rc1/test/runjsontests.py | 134 ++ tags/jsoncpp/0.6.0-rc1/test/rununittests.py | 73 + tags/jsoncpp/0.6.0-rc1/version | 1 + 185 files changed, 15355 insertions(+) create mode 100644 tags/jsoncpp/0.6.0-rc1/AUTHORS create mode 100644 tags/jsoncpp/0.6.0-rc1/LICENSE create mode 100644 tags/jsoncpp/0.6.0-rc1/NEWS.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/README.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/SConstruct create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/__init__.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/antglob.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/tarball.py create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/footer.html create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/header.html create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.6.0-rc1/doxybuild.py 
create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/autolink.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/config.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/features.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/forwards.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/json.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/reader.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/value.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/writer.h create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makerelease.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/test/cleantests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/generate_expected.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/runjsontests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/rununittests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/version diff --git a/tags/jsoncpp/0.6.0-rc1/AUTHORS b/tags/jsoncpp/0.6.0-rc1/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc1/LICENSE b/tags/jsoncpp/0.6.0-rc1/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. 
+ +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/0.6.0-rc1/NEWS.txt b/tags/jsoncpp/0.6.0-rc1/NEWS.txt new file mode 100644 index 0000000..7978c0a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/NEWS.txt @@ -0,0 +1,95 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment, as this is required for some + compiler installations. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Note: you need to set up the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in the start menu) before running scons. + + - Added support for amalgated source and header generation (a la sqlite). + Refer to the README.txt section "Generating amalgated source and header" + for details. + +* Value + + - Removed the experimental ValueAllocator; it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in the code. + + - Added support for 64 bits integers: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64 bits integers on systems that support them (based on __int64 on + the Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still return plain + "int"-based types, but assert if an attempt is made to retrieve + a 64 bits value that cannot be represented as the return type. + + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64 bits integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() return + the integer as a LargestInt/LargestUInt respectively. Those functions + are typically used when implementing a writer. + + The reader attempts to read numbers as 64 bits integers, and falls back + to reading a double if the number is not in the range of 64 bits + integers. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now return + long long. This change breaks code that was passing the return value + to *printf() functions. + + Support for 64 bits integers can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage.
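The snippet below is a minimal sketch of how the 64 bits accessors described above might be used. The key and literal values are illustrative only; the types and member functions (Json::Int64, Json::LargestInt, asInt64(), asLargestInt(), asInt()) are the ones introduced in this release.

    #include <json/json.h>
    #include <iostream>

    int main()
    {
        Json::Value v;
        v["big"] = Json::Int64( 9000000000LL ); // does not fit in 32 bits
        v["small"] = 1234;

        Json::Int64 big = v["big"].asInt64();                 // 64 bits accessor
        Json::LargestInt largest = v["big"].asLargestInt();   // largest supported integer type
        int small = v["small"].asInt();                       // still a plain int
        // v["big"].asInt() would assert: the value cannot be
        // represented as the return type.

        std::cout << big << " " << largest << " " << small << "\n";
        return 0;
    }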
+ + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). + + - An array index can be passed as an int to operator[], allowing use of a literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoids the loss of precision warning caused by the use of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added a test to ensure that the escape sequence "\/" is correctly handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. The error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under the + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal (http://wanderinghorse.net/home/stephan/) who + helped figure out the solution to the public domain issue. diff --git a/tags/jsoncpp/0.6.0-rc1/README.txt b/tags/jsoncpp/0.6.0-rc1/README.txt new file mode 100644 index 0000000..ba70329 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/README.txt @@ -0,0 +1,172 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of +values, and a collection of name/value pairs. + +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON values and handle serialization and unserialization to and from strings. + +It can also preserve existing comments across unserialization/serialization steps, +making it a convenient format for storing user input files. + +Unserialization parsing is user-friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +Python to be installed (http://www.python.org). + +You can download the scons-local distribution from the following URL: +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ + +Unzip it in the directory where you found this README file. scons.py should be +at the same level as README.
+ +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc GNU C++ (linux, also reported to work for Mac OS X) + +Note: if you are building with Microsoft Visual Studio 2008, you need to +set up the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding a platform is fairly simple. You need to change the SConstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the tests manually: + ========================== + +Note that the tests can be run by scons using the 'check' target (see above). + +You only need to run the tests manually if you are troubleshooting an issue. + +In the instructions below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using the JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Note: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the Python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + +Note that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + +* Generating amalgated source and header + ====================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgated source can be generated at any time by running the following +command from the top directory (requires Python 2.6): + +python amalgate.py + +It is possible to specify the header name. See the -h option for details. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that needs to be added to your project +- dist/json/json.h: header file to use in your project. It is +equivalent to including json/json.h in non-amalgated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header that provides forward declarations +of all JsonCpp types. This is typically what should be included in headers to +speed up compilation. + +The amalgated sources are generated by concatenating the JsonCpp sources in the +correct order and defining the macro JSON_IS_AMALGATED to prevent inclusion of +other headers. + +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path. jsoncpp headers +should be included as follows: + +#include <json/json.h>
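The following is a minimal sketch of typical usage with the classes mentioned in this README and in NEWS.txt (Json::Value, Json::Reader, Json::StyledWriter); the input document, key names and variable names are illustrative only:

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main()
    {
        const std::string doc = "{ \"name\": \"example\", \"count\": 1234 }";

        Json::Value root;
        Json::Reader reader;
        if ( !reader.parse( doc, root ) )   // unserialize from a string
        {
            std::cerr << reader.getFormattedErrorMessages();
            return 1;
        }

        std::cout << root["name"].asString() << "\n";   // access object members
        std::cout << root["count"].asInt() << "\n";

        Json::StyledWriter writer;                      // serialize back to a string
        std::cout << writer.write( root );
        return 0;
    }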
+ + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file that contains the input document in JSON format. +- a TESTNAME.expected file that contains a flattened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element, separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represents the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element paths. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated alongside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was correct. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWriter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +- test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing errors. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under the +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/0.6.0-rc1/SConstruct b/tags/jsoncpp/0.6.0-rc1/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available.
+ import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. 
+ if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + 
target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. 
+ Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' % sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', 
+## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc1/doc/footer.html b/tags/jsoncpp/0.6.0-rc1/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.6.0-rc1/doc/header.html b/tags/jsoncpp/0.6.0-rc1/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox new file mode 100644 index 0000000..97cc108 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox @@ -0,0 +1,126 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integers, real numbers, strings, ordered sequences of values, and +collections of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space": true } +} +\endverbatim + +\section _features Features +- read and write JSON documents +- attach C and C++ style comments to elements during parsing +- rewrite JSON documents preserving original comments + +Note: Comments used to be supported in JSON but were removed for +portability (C-like comments are not supported in Python). Since +comments are useful in configuration/input files, this feature was +preserved. + +\section _example Code example + +\code +Json::Value root; // will contain the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and its location in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormattedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'plug-ins', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructors for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from the +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To check out the source, see the following +instructions. + +\section _news What's New?
+The description of the latest changes can be found in +NEWS.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest NEWS.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +See file LICENSE in the top-directory of the project. + +Basically, JsonCpp is licensed under the MIT license, or public domain if desired +and recognized in your jurisdiction. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/0.6.0-rc1/doc/readme.txt b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox new file mode 100644 index 0000000..c7f14d5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox @@ -0,0 +1,35 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_writer Writer control + Provides more control to determine how specific items are serialized when JSON allows a choice: + - Optionally allow escaping of non-ASCII characters using the Unicode escape sequence "\\u". + - Optionally allow escaping of "/" using "\/". + \section ms_separation Expose JSON reader/writer APIs that do not impose using Json::Value. + Some typical use-cases involve converting an application-specific structure to/from a JSON document. + - Event-based parser to allow unserializing a JSON document directly into a data structure instead of + using the intermediate Json::Value. + - Stream-based parser to serialize a JSON document without using Json::Value as input. + - Performance-oriented parser/writer: + - Provides an event-based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionary can be provided to JSON reader + - Performance scenario & benchmarking +*/ diff --git a/tags/jsoncpp/0.6.0-rc1/doxybuild.py b/tags/jsoncpp/0.6.0-rc1/doxybuild.py new file mode 100644 index 0000000..03ad68d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doxybuild.py @@ -0,0 +1,169 @@ +"""Script to generate doxygen documentation.
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/config.h b/tags/jsoncpp/0.6.0-rc1/include/json/config.h new file mode 100644 index 0000000..24991d5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGATED + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/features.h b/tags/jsoncpp/0.6.0-rc1/include/json/features.h new file mode 100644 index 0000000..0b53db1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h new file mode 100644 index 0000000..083d44f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/json.h b/tags/jsoncpp/0.6.0-rc1/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/reader.h b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h new file mode 100644 index 0000000..5e4c32a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h @@ -0,0 +1,214 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
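A minimal end-to-end sketch of the Reader interface declared here (the input document and the choice of strictMode() are illustrative, not required by the header):

#include <json/json.h>
#include <iostream>

int parseExample()
{
   const std::string doc = "{ \"age\": 42, \"tags\": [\"a\", \"b\"] }";   // made-up input

   Json::Reader reader( Json::Features::strictMode() );   // comments rejected, root must be array or object
   Json::Value root;

   if ( !reader.parse( doc, root, /*collectComments=*/ false ) )
   {
      // getFormatedErrorMessages(), with a single 't', is the deprecated spelling of the same call.
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root["age"].asInt() << std::endl;
   return 0;
}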
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/value.h b/tags/jsoncpp/0.6.0-rc1/include/json/value.h new file mode 100644 index 0000000..66821ab --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
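Before the class declaration itself, a short sketch of the behaviours just described; the member names and numbers are illustrative only:

#include <json/json.h>

void objectSketch()
{
   Json::Value root( Json::objectValue );
   root["name"] = "jsoncpp";                        // member created on first access
   root["list"][2] = 7;                             // "list" becomes an array of size 3; slots 0 and 1 read back as null
   Json::Value port = root.get( "port", 8080 );     // "port" is absent, so the default 8080 is returned
   Json::Value::Members names = root.getMemberNames();   // member names, here "list" and "name"
}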
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
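A compact sketch of the array accessors above (arbitrary numbers):

#include <json/json.h>

int arraySketch()
{
   Json::Value arr( Json::arrayValue );
   arr.resize( 3 );                          // three null elements
   arr[0u] = 10;                             // 0u selects the ArrayIndex overload rather than the const char* one
   arr[1] = 20;
   int sum = 0;
   for ( Json::Value::ArrayIndex i = 0; i < arr.size(); ++i )
      if ( arr[i].isIntegral() )
         sum += arr[i].asInt();              // arr[2] stayed null, so sum ends up as 30
   return sum;
}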
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
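The comment above flags Path as experimental and untested; with that caveat, its intended use looks roughly like this (member names and the fallback value are invented):

#include <json/json.h>

int pathSketch()
{
   Json::Value root;
   root["settings"]["indent"]["length"] = 3;

   Json::Path path( ".settings.indent.length" );
   Json::Value length = path.resolve( root, 4 );   // 4 is only a fallback default
   return length.asInt();                          // 3 here
}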
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
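A worked instance of the page look-up arithmetic described in the class comment above, with itemsPerPage equal to 8 (illustration only, not part of the header):

inline unsigned int pageOf( unsigned int itemIndex )     { return itemIndex / 8; }   // 13 -> 1
inline unsigned int slotInPage( unsigned int itemIndex ) { return itemIndex % 8; }   // 13 -> 5
// element 13 therefore lives at pages_[1][5]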
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/writer.h b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h new file mode 100644 index 0000000..cb0bd9b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
+ * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
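A round-trip sketch using the three writers declared above (the document contents are illustrative):

#include <json/json.h>
#include <iostream>

void writeExample()
{
   Json::Value root;
   root["name"] = "jsoncpp";
   root["release"] = false;

   Json::FastWriter fast;
   std::string compact = fast.write( root );     // single line, machine friendly

   Json::StyledWriter styled;
   std::string pretty = styled.write( root );    // indented; comments, if any, are emitted too

   Json::StyledStreamWriter streamed( "  " );    // two-space indentation
   streamed.write( std::cout, root );
}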
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32
+		{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32
+	EndGlobalSection
+	GlobalSection(ExtensibilityGlobals) = postSolution
+	EndGlobalSection
+	GlobalSection(ExtensibilityAddIns) = postSolution
+	EndGlobalSection
+EndGlobal
diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj
new file mode 100644
index 0000000..99a4dd6
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj
@@ -0,0 +1,119 @@
[Visual Studio project XML (119 added lines) not preserved in this capture.]
diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj
new file mode 100644
index 0000000..2d7bf99
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj
@@ -0,0 +1,214 @@
[Visual Studio project XML (214 added lines) not preserved in this capture.]
diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj
new file mode 100644
index 0000000..df36700
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj
@@ -0,0 +1,130 @@
[Visual Studio project XML (130 added lines) not preserved in this capture.]
diff --git a/tags/jsoncpp/0.6.0-rc1/makerelease.py b/tags/jsoncpp/0.6.0-rc1/makerelease.py
new file mode 100644
index 0000000..a6e330e
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/makerelease.py
@@ -0,0 +1,380 @@
+"""Tag the sandbox for release, make source and doc tarballs.
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir + print 'Generating amalgated source tarball to', amalgated_tarball_path + amalgated_dir = 'dist/amalgated' + amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) + amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version + tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], + amalgated_dir, prefix_dir=amalgated_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + 
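The make_tarball / decompress helpers called above come from a tarball module that is not shown in this hunk. Purely as an illustrative sketch (the real helper may differ), the prefix-dir packing those calls rely on could be written with the standard tarfile module like this, mirroring the call signature used above:

    import os
    import tarfile

    def make_tarball( tarball_path, sources, base_dir, prefix_dir='' ):
        """Pack 'sources' into a gzipped tarball; each entry is stored relative
        to 'base_dir' and prefixed with 'prefix_dir' inside the archive."""
        def archive_name( path ):
            return os.path.normpath( os.path.join( prefix_dir,
                                                   os.path.relpath( path, base_dir ) ) )
        archive = tarfile.open( tarball_path, 'w:gz' )
        try:
            for source in sources:
                archive.add( source, archive_name( source ) )   # recursive for directories
        finally:
            archive.close()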
all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
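Both allocator variants above follow the contract spelled out in json_batchallocator.h: BatchAllocator hands out raw storage only, so construction and destruction are the caller's responsibility. A self-contained sketch of that allocate / placement-new / destroy / release cycle, for illustration only (the wrapper function and template arguments shown are assumptions, not part of this file):

    #include <new>   // placement new

    static void batchAllocatorUsageSketch()
    {
       Json::BatchAllocator<Json::ValueInternalArray, 1> allocator;
       Json::ValueInternalArray *array = allocator.allocate();   // raw, uninitialized slot
       new (array) Json::ValueInternalArray();                    // construct in place
       array->~ValueInternalArray();                              // destroy explicitly...
       allocator.release( array );                                // ...then put the slot back on the free list
    }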
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..7c594e2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp @@ -0,0 +1,880 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
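// A minimal usage sketch for this overload (file name and contents are
// hypothetical); the stream is read in one go below and handed to the
// std::string overload of parse():
//   std::ifstream in( "config.json" );
//   Json::Value root;
//   Json::Reader reader;
//   bool ok = reader.parse( in, root, /*collectComments=*/true );
// Whether the getline() call below really avoids an extra copy depends on the
// std::string implementation; C++11-conforming libraries are not
// reference-counted.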
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); 
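// Recovery note: addErrorAndRecover() records the message together with the
// offending token's position and then skips tokens until the next '}' (or the
// end of the stream); recorded errors are later formatted with line and
// column numbers by getFormattedErrorMessages(). Roughly, the surrounding
// loop recognizes
//   object ::= '{' ( string ':' value ( ',' string ':' value )* )? '}'
// where comments, when enabled, are skipped before member names and values.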
+ } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
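// Worked example of the guard below, using 32-bit arithmetic for illustration
// (the code actually works in LargestUInt): with
// maxIntegerValue = 4294967295, threshold = 429496729 and
// lastDigitThreshold = 5. Decoding "4294967295" succeeds (the final digit 5
// is <= 5), but for "4294967296" value equals threshold when the final digit
// 6 is read, and 6 > 5, so the token is handed to decodeDouble() instead of
// being allowed to wrap around.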
+ if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; + } + if ( isNegative ) + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize+1]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && 
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... 
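 *
 * For example, codePointToUTF8() below turns U+20AC (the Euro sign) into the
 * three bytes E2 82 AC:
 *   0x20AC = 0010 0000 1010 1100
 *   byte 0 = 0xE0 | (cp >> 12)         = 1110 0010 = 0xE2
 *   byte 1 = 0x80 | ((cp >> 6) & 0x3f) = 10 000010 = 0x82
 *   byte 2 = 0x80 | (cp & 0x3f)        = 10 101100 = 0xAC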
+ * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specify the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( LargestUInt value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp new file mode 100644 index 0000000..c810417 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp @@ -0,0 +1,1847 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + + +/// Unknown size marker +enum { unknown = (unsigned)-1 }; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + +} // namespace Json + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
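// Storage note: when JSON_VALUE_USE_INTERNAL_MAP is defined, the includes
// below pull in ValueInternalArray and ValueInternalMap, and Value stores
// arrays/objects through the pluggable arrayAllocator()/mapAllocator()
// instances; otherwise (the default) both arrays and objects are backed by a
// std::map<CZString, Value> (the ObjectValues typedef).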
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGATED) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex 
newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
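// (The "Error:" comments in Path::resolve()/make() are placeholders: nothing
//  is recorded and resolution simply continues. A hypothetical use, with the
//  document contents made up for illustration:
//    Json::Value root;                        // e.g. {"settings":{"port":8080}}
//    Json::Path path( ".settings.port" );
//    int port = path.resolve( root, Json::Value( 0 ) ).asInt();
//  would yield 8080, or 0 if the path cannot be resolved.)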
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( 
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..8c4c180 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asLargestInt() ); + break; + case uintValue: + document_ += valueToString( value.asLargestUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + 
lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
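A minimal usage sketch for the two writers, assuming the usual json headers and <iostream>: FastWriter emits a compact single line, while StyledStreamWriter (also used by the operator<< overload further down) pretty-prints to any std::ostream:

    Json::Value root;
    root["name"] = "example";
    root["values"].append( 1 );
    root["values"].append( 2 );

    Json::FastWriter fast;
    std::string compact = fast.write( root );   // {"name":"example","values":[1,2]} plus a newline

    Json::StyledStreamWriter styled( "  " );     // two-space indentation
    styled.write( std::cout, root );             // multi-line, indented output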
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
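addFailure() walks the chain of active PredicateContexts so that an assertion failing inside a helper invoked through JSONTEST_ASSERT_PRED is reported beneath the call site that invoked it. A minimal sketch of that nesting, with fixture and helper names chosen purely for illustration:

    struct MyFixture : JsonTest::TestCase
    {
       void checkPositive( int x )
       {
          JSONTEST_ASSERT( x > 0 ) << "x=" << x;      // inner failure
       }
    };

    JSONTEST_FIXTURE( MyFixture, positives )
    {
       JSONTEST_ASSERT_PRED( checkPositive( -1 ) );   // reported nested under this line
    }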
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..0d07238 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h @@ -0,0 +1,259 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). 
+ PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. + TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. 
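   /// Typical invocations, for illustration (the binary name comes from
   /// the test_lib_json sconscript target):
   ///   test_lib_json --list-tests
   ///   test_lib_json --test ValueTest/isObject
   ///   test_lib_json --test-auto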
+ int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. 
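/// Typical usage, assuming MyFixture derives from JsonTest::TestCase (names illustrative):
///   JSONTEST_FIXTURE( MyFixture, defaultValueIsNull )
///   {
///      JSONTEST_ASSERT( Json::Value().isNull() );
///   }
///   JSONTEST_REGISTER_FIXTURE( runner, MyFixture, defaultValueIsNull );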
+#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp new file mode 100644 index 0000000..de64200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp @@ -0,0 +1,271 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , float_( 0.00390625f ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. 
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/test/cleantests.py b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 
+.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json new 
file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 
+.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 
+.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 
+.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 
+.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 
+.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 
+.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 
+.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 
+.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 
+.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 
+.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json new file mode 100644 
index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ 
+.="¢" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json 
b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff 
--git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": 
false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + 
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/test/rununittests.py b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/version b/tags/jsoncpp/0.6.0-rc1/version new file mode 100644 index 0000000..8d1042e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/version @@ -0,0 +1 @@ +0.6.0-rc1 \ No newline at end of file From 832dfb5828c66c0f6f87046cb5b1594a3fd2bcd7 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 07:06:07 +0000 Subject: [PATCH 177/268] Added support for amalgated source and header generation (a la sqlite). Refer to README.txt section "Generating amalgated source and header" for detail. The amalgated sources are generated by concatenating JsonCpp source in the correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of other headers. 
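In sketch form, the guard this relies on is an ordinary conditional include; everything in the snippet below other than the JSON_IS_AMALGATED macro and json/config.h is hypothetical, shown only to illustrate the pattern the modified headers are expected to follow:

    // guard_sketch.h -- hypothetical header illustrating the pattern: when the
    // amalgated source defines JSON_IS_AMALGATED, the private jsoncpp headers
    // are no longer included individually.
    #ifndef GUARD_SKETCH_H_INCLUDED
    #define GUARD_SKETCH_H_INCLUDED

    #if !defined(JSON_IS_AMALGATED)
    # include <json/config.h>   // normal (non-amalgated) build pulls in the real header
    #endif // !defined(JSON_IS_AMALGATED)

    // ... declarations that depend on json/config.h would follow here ...

    #endif // GUARD_SKETCH_H_INCLUDED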
Sources and headers have been modified to prevent any inclusion when this macro is defined. The script amalgate.py handles the generation. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@177 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/amalgate.py | 147 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 trunk/jsoncpp/amalgate.py diff --git a/trunk/jsoncpp/amalgate.py b/trunk/jsoncpp/amalgate.py new file mode 100644 index 0000000..502971c --- /dev/null +++ b/trunk/jsoncpp/amalgate.py @@ -0,0 +1,147 @@ +"""Amalgate json-cpp library sources into a single source and header file. + +Requires Python 2.6 + +Example of invocation (must be invoked from json-cpp top directory): +python amalgate.py +""" +import os +import os.path +import sys + +class AmalagatedFile: + def __init__( self, top_dir ): + self.top_dir = top_dir + self.blocks = [] + + def add_text( self, text ): + if not text.endswith( '\n' ): + text += '\n' + self.blocks.append( text ) + + def add_file( self, relative_input_path, wrap_in_comment=False ): + def add_marker( prefix ): + self.add_text( '' ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '' ) + add_marker( 'Beginning' ) + f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) + content = f.read() + if wrap_in_comment: + content = '/*\n' + content + '\n*/' + self.add_text( content ) + f.close() + add_marker( 'End' ) + self.add_text( '\n\n\n\n' ) + + def get_value( self ): + return ''.join( self.blocks ).replace('\r\n','\n') + + def write_to( self, output_path ): + output_dir = os.path.dirname( output_path ) + if output_dir and not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + f = open( output_path, 'wb' ) + f.write( self.get_value() ) + f.close() + +def amalgate_source( source_top_dir=None, + target_source_path=None, + header_include_path=None ): + """Produces amalgated source. + Parameters: + source_top_dir: top-directory + target_source_path: output .cpp path + header_include_path: generated header path relative to target_source_path. + """ + print 'Amalgating header...' + header = AmalagatedFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_file( 'include/json/features.h' ) + header.add_file( 'include/json/value.h' ) + header.add_file( 'include/json/reader.h' ) + header.add_file( 'include/json/writer.h' ) + header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) + + target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) + print 'Writing amalgated header to %r' % target_header_path + header.write_to( target_header_path ) + + base, ext = os.path.splitext( header_include_path ) + forward_header_include_path = base + '-forwards' + ext + print 'Amalgating forward header...'
+ header = AmalagatedFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) + header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + + target_forward_header_path = os.path.join( os.path.dirname(target_source_path), + forward_header_include_path ) + print 'Writing amalgated forward header to %r' % target_forward_header_path + header.write_to( target_forward_header_path ) + + print 'Amalgating source...' + source = AmalagatedFile( source_top_dir ) + source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' ) + source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + source.add_file( 'LICENSE', wrap_in_comment=True ) + source.add_text( '' ) + source.add_text( '#include <%s>' % header_include_path ) + source.add_text( '' ) + source.add_file( 'src/lib_json\json_tool.h' ) + source.add_file( 'src/lib_json\json_reader.cpp' ) + source.add_file( 'src/lib_json\json_batchallocator.h' ) + source.add_file( 'src/lib_json\json_valueiterator.inl' ) + source.add_file( 'src/lib_json\json_value.cpp' ) + source.add_file( 'src/lib_json\json_writer.cpp' ) + + print 'Writing amalgated source to %r' % target_source_path + source.write_to( target_source_path ) + +def main(): + usage = """%prog [options] +Generate a single amalgated source and header file from the sources. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp', + help="""Output .cpp source path. [Default: %default]""") + parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h', + help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") + parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(), + help="""Source top-directory. 
[Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + msg = amalgate_source( source_top_dir=options.top_dir, + target_source_path=options.target_source_path, + header_include_path=options.header_include_path ) + if msg: + sys.stderr.write( msg + '\n' ) + sys.exit( 1 ) + else: + print 'Source succesfully amalagated' + +if __name__ == '__main__': + main() From f188a276b2c99b70b5b06f09d8f0717bcbc0855d Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 07:06:33 +0000 Subject: [PATCH 178/268] Need more tests on unicode git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@178 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/doc/roadmap.dox | 2 ++ 1 file changed, 2 insertions(+) diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox index c7f14d5..e6fc17a 100644 --- a/trunk/jsoncpp/doc/roadmap.dox +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -32,4 +32,6 @@ - Provides support for static property name definition avoiding allocation - Static property dictionnary can be provided to JSON reader - Performance scenario & benchmarking + \section testing Testing + - Adds more tests for unicode parsing (e.g. including surrogate and error detection). */ From 36aaef587fc012a3b5f9ec2ff717e09a3d44370c Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 07:09:26 +0000 Subject: [PATCH 179/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@179 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc1/AUTHORS | 1 - tags/jsoncpp/0.6.0-rc1/LICENSE | 55 - tags/jsoncpp/0.6.0-rc1/NEWS.txt | 95 - tags/jsoncpp/0.6.0-rc1/README.txt | 172 -- tags/jsoncpp/0.6.0-rc1/SConstruct | 248 -- tags/jsoncpp/0.6.0-rc1/devtools/__init__.py | 1 - tags/jsoncpp/0.6.0-rc1/devtools/antglob.py | 201 -- tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py | 63 - .../0.6.0-rc1/devtools/licenseupdater.py | 93 - tags/jsoncpp/0.6.0-rc1/devtools/tarball.py | 53 - tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.6.0-rc1/doc/footer.html | 23 - tags/jsoncpp/0.6.0-rc1/doc/header.html | 24 - tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox | 126 - tags/jsoncpp/0.6.0-rc1/doc/readme.txt | 1 - tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox | 35 - tags/jsoncpp/0.6.0-rc1/doxybuild.py | 169 -- .../jsoncpp/0.6.0-rc1/include/json/autolink.h | 24 - tags/jsoncpp/0.6.0-rc1/include/json/config.h | 96 - .../jsoncpp/0.6.0-rc1/include/json/features.h | 49 - .../jsoncpp/0.6.0-rc1/include/json/forwards.h | 44 - tags/jsoncpp/0.6.0-rc1/include/json/json.h | 15 - tags/jsoncpp/0.6.0-rc1/include/json/reader.h | 214 -- tags/jsoncpp/0.6.0-rc1/include/json/value.h | 1103 --------- tags/jsoncpp/0.6.0-rc1/include/json/writer.h | 185 -- .../0.6.0-rc1/makefiles/vs71/jsoncpp.sln | 46 - .../0.6.0-rc1/makefiles/vs71/jsontest.vcproj | 119 - .../0.6.0-rc1/makefiles/vs71/lib_json.vcproj | 214 -- .../makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.6.0-rc1/makerelease.py | 380 --- .../jsoncpp/0.6.0-rc1/scons-tools/globtool.py | 53 - tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py | 179 -- .../0.6.0-rc1/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py | 82 - .../0.6.0-rc1/src/jsontestrunner/main.cpp | 269 --- .../0.6.0-rc1/src/jsontestrunner/sconscript | 9 - .../src/lib_json/json_batchallocator.h | 130 - .../src/lib_json/json_internalarray.inl | 456 ---- .../src/lib_json/json_internalmap.inl | 615 ----- .../0.6.0-rc1/src/lib_json/json_reader.cpp | 880 ------- .../0.6.0-rc1/src/lib_json/json_tool.h | 93 - .../0.6.0-rc1/src/lib_json/json_value.cpp | 
1847 -------------- .../src/lib_json/json_valueiterator.inl | 299 --- .../0.6.0-rc1/src/lib_json/json_writer.cpp | 838 ------- .../jsoncpp/0.6.0-rc1/src/lib_json/sconscript | 8 - .../0.6.0-rc1/src/test_lib_json/jsontest.cpp | 608 ----- .../0.6.0-rc1/src/test_lib_json/jsontest.h | 259 -- .../0.6.0-rc1/src/test_lib_json/main.cpp | 271 --- .../0.6.0-rc1/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.6.0-rc1/test/cleantests.py | 10 - .../test/data/fail_test_array_01.json | 1 - .../test/data/test_array_01.expected | 1 - .../0.6.0-rc1/test/data/test_array_01.json | 1 - .../test/data/test_array_02.expected | 2 - .../0.6.0-rc1/test/data/test_array_02.json | 1 - .../test/data/test_array_03.expected | 6 - .../0.6.0-rc1/test/data/test_array_03.json | 1 - .../test/data/test_array_04.expected | 5 - .../0.6.0-rc1/test/data/test_array_04.json | 1 - .../test/data/test_array_05.expected | 100 - .../0.6.0-rc1/test/data/test_array_05.json | 1 - .../test/data/test_array_06.expected | 5 - .../0.6.0-rc1/test/data/test_array_06.json | 4 - .../test/data/test_basic_01.expected | 1 - .../0.6.0-rc1/test/data/test_basic_01.json | 1 - .../test/data/test_basic_02.expected | 1 - .../0.6.0-rc1/test/data/test_basic_02.json | 1 - .../test/data/test_basic_03.expected | 3 - .../0.6.0-rc1/test/data/test_basic_03.json | 3 - .../test/data/test_basic_04.expected | 2 - .../0.6.0-rc1/test/data/test_basic_04.json | 2 - .../test/data/test_basic_05.expected | 2 - .../0.6.0-rc1/test/data/test_basic_05.json | 2 - .../test/data/test_basic_06.expected | 2 - .../0.6.0-rc1/test/data/test_basic_06.json | 2 - .../test/data/test_basic_07.expected | 2 - .../0.6.0-rc1/test/data/test_basic_07.json | 2 - .../test/data/test_basic_08.expected | 2 - .../0.6.0-rc1/test/data/test_basic_08.json | 3 - .../test/data/test_basic_09.expected | 2 - .../0.6.0-rc1/test/data/test_basic_09.json | 4 - .../test/data/test_comment_01.expected | 8 - .../0.6.0-rc1/test/data/test_comment_01.json | 8 - .../test/data/test_complex_01.expected | 20 - .../0.6.0-rc1/test/data/test_complex_01.json | 17 - .../test/data/test_integer_01.expected | 1 - .../0.6.0-rc1/test/data/test_integer_01.json | 2 - .../test/data/test_integer_02.expected | 1 - .../0.6.0-rc1/test/data/test_integer_02.json | 2 - .../test/data/test_integer_03.expected | 1 - .../0.6.0-rc1/test/data/test_integer_03.json | 2 - .../test/data/test_integer_04.expected | 2 - .../0.6.0-rc1/test/data/test_integer_04.json | 3 - .../test/data/test_integer_05.expected | 2 - .../0.6.0-rc1/test/data/test_integer_05.json | 2 - .../test/data/test_integer_06_64bits.expected | 1 - .../test/data/test_integer_06_64bits.json | 2 - .../test/data/test_integer_07_64bits.expected | 1 - .../test/data/test_integer_07_64bits.json | 2 - .../test/data/test_integer_08_64bits.expected | 1 - .../test/data/test_integer_08_64bits.json | 2 - .../test/data/test_large_01.expected | 2122 ----------------- .../0.6.0-rc1/test/data/test_large_01.json | 2 - .../test/data/test_object_01.expected | 1 - .../0.6.0-rc1/test/data/test_object_01.json | 1 - .../test/data/test_object_02.expected | 2 - .../0.6.0-rc1/test/data/test_object_02.json | 1 - .../test/data/test_object_03.expected | 4 - .../0.6.0-rc1/test/data/test_object_03.json | 5 - .../test/data/test_object_04.expected | 2 - .../0.6.0-rc1/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.6.0-rc1/test/data/test_real_01.expected | 2 - .../0.6.0-rc1/test/data/test_real_01.json | 3 - 
.../0.6.0-rc1/test/data/test_real_02.expected | 2 - .../0.6.0-rc1/test/data/test_real_02.json | 3 - .../0.6.0-rc1/test/data/test_real_03.expected | 2 - .../0.6.0-rc1/test/data/test_real_03.json | 3 - .../0.6.0-rc1/test/data/test_real_04.expected | 2 - .../0.6.0-rc1/test/data/test_real_04.json | 3 - .../0.6.0-rc1/test/data/test_real_05.expected | 3 - .../0.6.0-rc1/test/data/test_real_05.json | 3 - .../0.6.0-rc1/test/data/test_real_06.expected | 3 - .../0.6.0-rc1/test/data/test_real_06.json | 3 - .../0.6.0-rc1/test/data/test_real_07.expected | 3 - .../0.6.0-rc1/test/data/test_real_07.json | 3 - .../test/data/test_string_01.expected | 1 - .../0.6.0-rc1/test/data/test_string_01.json | 1 - .../test/data/test_string_02.expected | 1 - .../0.6.0-rc1/test/data/test_string_02.json | 1 - .../test/data/test_string_03.expected | 1 - .../0.6.0-rc1/test/data/test_string_03.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - .../0.6.0-rc1/test/generate_expected.py | 11 - .../0.6.0-rc1/test/jsonchecker/fail1.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail10.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail11.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail12.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail13.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail14.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail15.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail16.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail17.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail18.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail19.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail2.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail20.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail21.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail22.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail23.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail24.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail25.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail26.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail27.json | 2 - .../0.6.0-rc1/test/jsonchecker/fail28.json | 2 - .../0.6.0-rc1/test/jsonchecker/fail29.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail3.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail30.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail31.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail32.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail33.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail4.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail5.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail6.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail7.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail8.json | 1 - .../0.6.0-rc1/test/jsonchecker/fail9.json | 1 - .../0.6.0-rc1/test/jsonchecker/pass1.json | 58 - .../0.6.0-rc1/test/jsonchecker/pass2.json | 1 - .../0.6.0-rc1/test/jsonchecker/pass3.json | 6 - .../0.6.0-rc1/test/jsonchecker/readme.txt | 3 - .../0.6.0-rc1/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.6.0-rc1/test/runjsontests.py | 134 -- tags/jsoncpp/0.6.0-rc1/test/rununittests.py | 73 - tags/jsoncpp/0.6.0-rc1/version | 1 - 185 files changed, 15355 deletions(-) delete mode 100644 tags/jsoncpp/0.6.0-rc1/AUTHORS delete mode 100644 tags/jsoncpp/0.6.0-rc1/LICENSE 
delete mode 100644 tags/jsoncpp/0.6.0-rc1/NEWS.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/README.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/SConstruct delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/footer.html delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/header.html delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/readme.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.6.0-rc1/doxybuild.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/config.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/features.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/json.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/reader.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/value.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/writer.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc1/makerelease.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/cleantests.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json delete mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected delete mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json delete mode 100644 
tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/test/rununittests.py delete mode 100644 tags/jsoncpp/0.6.0-rc1/version diff --git a/tags/jsoncpp/0.6.0-rc1/AUTHORS b/tags/jsoncpp/0.6.0-rc1/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.6.0-rc1/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc1/LICENSE b/tags/jsoncpp/0.6.0-rc1/LICENSE deleted file mode 100644 index ca2bfe1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -The JsonCpp library's source code, including accompanying documentation, -tests and demonstration applications, are licensed under the following -conditions... - -The author (Baptiste Lepilleur) explicitly disclaims copyright in all -jurisdictions which recognize such a disclaimer. In such jurisdictions, -this software is released into the Public Domain. - -In jurisdictions which do not recognize Public Domain property (e.g. Germany as of -2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is -released under the terms of the MIT License (see below). 
- -In jurisdictions which recognize Public Domain property, the user of this -software may choose to accept it either as 1) Public Domain, 2) under the -conditions of the MIT License (see below), or 3) under the terms of dual -Public Domain/MIT License conditions described here, as they choose. - -The MIT License is about as close to Public Domain as a license can get, and is -described in clear, concise terms at: - - http://en.wikipedia.org/wiki/MIT_License - -The full text of the MIT License follows: - -======================================================================== -Copyright (c) 2007-2010 Baptiste Lepilleur - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -======================================================================== -(END LICENSE TEXT) - -The MIT license is compatible with both the GPL and commercial -software, affording one all of the rights of Public Domain with the -minor nuisance of being required to keep the above copyright notice -and license text in the source code. Note also that by accepting the -Public Domain "license" you can re-license your copy using whatever -license you like. diff --git a/tags/jsoncpp/0.6.0-rc1/NEWS.txt b/tags/jsoncpp/0.6.0-rc1/NEWS.txt deleted file mode 100644 index 7978c0a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/NEWS.txt +++ /dev/null @@ -1,95 +0,0 @@ - New in JsonCpp 0.6.0: - --------------------- - -* Compilation - - - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now - propagated to the build environment as this is required for some - compiler installation. - - - Added support for Microsoft Visual Studio 2008 (bug #2930462): - The platform "msvc90" has been added. - - Notes: you need to setup the environment by running vcvars32.bat - (e.g. MSVC 2008 command prompt in start menu) before running scons. - - - Added support for amalgated source and header generation (a la sqlite). - Refer to README.txt section "Generating amalgated source and header" - for detail. - -* Value - - - Removed experimental ValueAllocator, it caused static - initialization/destruction order issues (bug #2934500). - The DefaultValueAllocator has been inlined in code. - - - Added support for 64 bits integer: - - Types Json::Int64 and Json::UInt64 have been added. They are aliased - to 64 bits integers on system that support them (based on __int64 on - Microsoft Visual Studio platform, and long long on other platforms). - - Types Json::LargestInt and Json::LargestUInt have been added. 
They are - aliased to the largest integer type supported: - either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. - - Json::Value::asInt() and Json::Value::asUInt() still returns plain - "int" based types, but asserts if an attempt is made to retrieve - a 64 bits value that can not represented as the return type. - - Json::Value::asInt64() and Json::Value::asUInt64() have been added - to obtain the 64 bits integer value. - - Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns - the integer as a LargestInt/LargestUInt respectively. Those functions - functions are typically used when implementing writer. - - The reader attempts to read number as 64 bits integer, and fall back - to reading a double if the number is not in the range of 64 bits - integer. - - Warning: Json::Value::asInt() and Json::Value::asUInt() now returns - long long. This changes break code that was passing the return value - to *printf() function. - - Support for 64 bits integer can be disabled by defining the macro - JSON_NO_INT64 (uncomment it in json/config.h for example), though - it should have no impact on existing usage. - - - The type Json::ArrayIndex is used for indexes of a JSON value array. It - is an unsigned int (typically 32 bits). - - - Array index can be passed as int to operator[], allowing use of literal: - Json::Value array; - array.append( 1234 ); - int value = array[0].asInt(); // did not compile previously - - - Added float Json::Value::asFloat() to obtain a floating point value as a - float (avoid lost of precision warning caused by used of asDouble() - to initialize a float). - -* Reader - - - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. - Bug #3023708 (Formatted has 2 't'). The old member function is deprecated - but still present for backward compatibility. - -* Tests - - - Added test to ensure that the escape sequence "\/" is corrected handled - by the parser. - -* Bug fixes - - - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now - correctly detected. - - - Bug #3139678: stack buffer overflow when parsing a double with a - length of 32 characters. - -* License - - - See file LICENSE for details. Basically JsonCpp is now licensed under - MIT license, or public domain if desired and recognized in your jurisdiction. - Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who - helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/0.6.0-rc1/README.txt b/tags/jsoncpp/0.6.0-rc1/README.txt deleted file mode 100644 index ba70329..0000000 --- a/tags/jsoncpp/0.6.0-rc1/README.txt +++ /dev/null @@ -1,172 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate -JSON value, handle serialization and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). 
- -You download scons-local distribution from the following url: -http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ - -Unzip it in the directory where you found this README file. scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - msvc90 Microsoft Visual Studio 2008 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -Notes: if you are building with Microsoft Visual Studio 2008, you need to -setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) -before running scons. - -Adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -Notes that test can be run by scons using the 'check' target (see above). - -You need to run test manually only if you are troubleshooting an issue. - -In the instruction below, replace "path to jsontest.exe" with the path -of the 'jsontest' executable that was compiled on your platform. - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - -Notes that the documentation is also available for download as a tarball. -The documentation of the latest release is available online at: -http://jsoncpp.sourceforge.net/ - -* Generating amalgated source and header - ====================================== - -JsonCpp is provided with a script to generate a single header and a single -source file to ease inclusion in an existing project. - -The amalgated source can be generated at any time by running the following -command from the top-directory (requires python 2.6): - -python amalgate.py - -It is possible to specify header name. See -h options for detail. By default, -the following files are generated: -- dist/jsoncpp.cpp: source file that need to be added to your project -- dist/json/json.h: header file corresponding to use in your project. It is -equivalent to including json/json.h in non-amalgated source. This header -only depends on standard headers. -- dist/json/json-forwards.h: header the provides forward declaration -of all JsonCpp types. This typically what should be included in headers to -speed-up compilation. - -The amalgated sources are generated by concatenating JsonCpp source in the -correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of -other headers. 
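A minimal sketch of consuming the amalgamated output described above, assuming the generated dist/json/json.h header and the 0.6.x-era Json::Reader / Json::Value API mentioned in NEWS.txt earlier in this patch (illustrative sketch, not part of the deleted README):

// Sketch only: dist/jsoncpp.cpp and dist/json/json.h (produced by amalgate.py,
// as described above) are assumed to have been added to the project.
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   std::string doc = "{ \"name\": \"jsoncpp\", \"release\": 6 }";
   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( doc, root ) )   // returns false on malformed input
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root["name"].asString() << " "        // prints "jsoncpp"
             << root["release"].asInt() << std::endl; // prints 6
   return 0;
}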
- -* Using json-cpp in your project: - =============================== - -include/ should be added to your compiler include path. jsoncpp headers -should be included as follow: - -#include - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. - -* License - ======= - -See file LICENSE for details. Basically JsonCpp is licensed under -MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/0.6.0-rc1/SConstruct b/tags/jsoncpp/0.6.0-rc1/SConstruct deleted file mode 100644 index 23225cb..0000000 --- a/tags/jsoncpp/0.6.0-rc1/SConstruct +++ /dev/null @@ -1,248 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. 
- import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - # LD_LIBRARY_PATH & co is required on some system for the compiler - vars = {} - for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'msvc90': - env['MSVS_VERSION']='9.0' - # Scons 1.2 fails to detect the correct location of the platform SDK. - # So we propagate those from the environment. This requires that the - # user run vcvars32.bat before compiling. 
- if 'INCLUDE' in os.environ: - env['ENV']['INCLUDE'] = os.environ['INCLUDE'] - if 'LIB' in os.environ: - env['ENV']['LIB'] = os.environ['LIB'] - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - 
target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. 
- Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' % sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript 
**/wscript_build', -## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py deleted file mode 100644 index 03e0467..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Updates the license text in source file. -""" - -# An existing license is found if the file starts with the string below, -# and ends with the first blank line. -LICENSE_BEGIN = "// Copyright " - -BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -""".replace('\r\n','\n') - -def update_license( path, dry_run, show_diff ): - """Update the license statement in the specified file. - Parameters: - path: path of the C++ source file to update. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - with open( path, 'rt' ) as fin: - original_text = fin.read().replace('\r\n','\n') - newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith( LICENSE_BEGIN ): - # No existing license found => prepend it - new_text = BRIEF_LICENSE + original_text - else: - license_end_index = original_text.index( '\n\n' ) # search first blank line - new_text = BRIEF_LICENSE + original_text[license_end_index+2:] - if original_text != new_text: - if not dry_run: - with open( path, 'wb' ) as fout: - fout.write( new_text.replace('\n', newline ) ) - print 'Updated', path - if show_diff: - import difflib - print '\n'.join( difflib.unified_diff( original_text.split('\n'), - new_text.split('\n') ) ) - return True - return False - -def update_license_in_source_directories( source_dirs, dry_run, show_diff ): - """Updates license text in C++ source files found in directory source_dirs. - Parameters: - source_dirs: list of directory to scan for C++ sources. Directories are - scanned recursively. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - from devtools import antglob - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - for source_dir in source_dirs: - cpp_sources = antglob.glob( source_dir, - includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs ) - for source in cpp_sources: - update_license( source, dry_run, show_diff ) - -def main(): - usage = """%prog DIR [DIR2...] 
-Updates license text in sources of the project in source files found -in the directory specified on the command-line. - -Example of call: -python devtools\licenseupdater.py include src -n --diff -=> Show change that would be made to the sources. - -python devtools\licenseupdater.py include src -=> Update license statement on all sources in directories include/ and src/. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, - help="""Only show what files are updated, do not update the files""") - parser.add_option('--diff', dest="show_diff", action='store_true', default=False, - help="""On update, show change made to the file.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - update_license_in_source_directories( args, options.dry_run, options.show_diff ) - print 'Done' - -if __name__ == '__main__': - import sys - import os.path - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - main() - diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! 
- tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. 
- -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. 
If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. 
The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. 
This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. 
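-# As a brief illustration (a sketch, not taken from the jsoncpp sources), the
-# \todo and \showinitializer/\hideinitializer commands referred to above are
-# written inside ordinary C++ documentation comments, e.g.:
-#   /// Default indentation used by the writer. \hideinitializer
-#   static const int kDefaultIndent = 3;
-#   /// \todo Add a strict parsing mode (collected on the Todo List page).
-#   bool parse( const std::string &document );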
- -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. 
The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). 
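-# For illustration (the file name below is hypothetical, not part of this
-# project), a fragment stored under EXAMPLE_PATH can be pulled into the
-# generated documentation with the \include command:
-#   /// Parses a configuration document.
-#   /// Typical usage:
-#   /// \include read_config_example.cpp
-#   class Reader;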
- -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. 
- -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. 
- -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. 
- -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. 
Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
- -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. 
- -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. 
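-# To illustrate why the preprocessing tags in this section matter (a sketch,
-# not copied verbatim from the jsoncpp headers): declarations guarded by a
-# macro are only seen by doxygen when that macro is defined, e.g. through the
-# PREDEFINED list further below:
-#   #ifdef JSON_VALUE_USE_INTERNAL_MAP
-#   class ValueInternalMap;   // documented only when the macro is predefined
-#   #endif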
- -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). 
- -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. 
- -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. 
Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc1/doc/footer.html b/tags/jsoncpp/0.6.0-rc1/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - - - - - - -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.6.0-rc1/doc/header.html b/tags/jsoncpp/0.6.0-rc1/doc/header.html deleted file mode 100644 index 1a6ad61..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox deleted file mode 100644 index 97cc108..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox +++ /dev/null @@ -1,126 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space": true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- attach C and C++ style comments to element during parsing -- rewrite JSON document preserving original comments - -Notes: Comments used to be supported in JSON but where removed for -portability (C like comments are not supported in Python). Since -comments are useful in configuration/input file, this feature was -preserved. - -\section _example Code example - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormattedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _pbuild Build instructions -The build instructions are located in the file -README.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _pdownload Download -The sources can be downloaded from -SourceForge download page. - -The latest version of the source is available in the project's subversion repository: - -http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ - -To checkout the source, see the following -instructions. - -\section _news What's New? 
-The description of latest changes can be found in -NEWS.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest NEWS.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -See file LICENSE in the top-directory of the project. - -Basically JsonCpp is licensed under the MIT license, or public domain if desired -and recognized in your jurisdiction. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.6.0-rc1/doc/readme.txt b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox deleted file mode 100644 index c7f14d5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox +++ /dev/null @@ -1,35 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_writer Writer control - Provides more control to determine how specific items are serialized when JSON allows a choice: - - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". - - Optionally allow escaping of "/" using "\/". - \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value. - Some typical use cases involve converting an application-specific structure to/from a JSON document. - - Event-based parser to allow deserializing a JSON document directly into a data structure instead of - using the intermediate Json::Value. - - Stream-based parser to serialize a JSON document without using Json::Value as input. - - Performance-oriented parser/writer: - - Provides an event-based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionary can be provided to JSON reader - - Performance scenario & benchmarking -*/ diff --git a/tags/jsoncpp/0.6.0-rc1/doxybuild.py b/tags/jsoncpp/0.6.0-rc1/doxybuild.py deleted file mode 100644 index 03ad68d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/doxybuild.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Script to generate doxygen documentation.
-""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'LICENSE', - 'NEWS.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h deleted file mode 100644 index 02328d1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/config.h b/tags/jsoncpp/0.6.0-rc1/include/json/config.h deleted file mode 100644 index 24991d5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/config.h +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -/// If defined, indicates that the source file is amalgated -/// to prevent private header inclusion. -/// Remarks: it is automatically defined in the generated amalgated header. -// #define JSON_IS_AMALGATED - - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer -// Storages, and 64 bits integer support is disabled. -// #define JSON_NO_INT64 1 - -#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 -// Microsoft Visual Studio 6 only support conversion from __int64 to double -// (no conversion from unsigned __int64). -#define JSON_USE_INT64_DOUBLE_CONVERSION 1 -#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 - -#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 -/// Indicates that the following function is deprecated. -# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) -#endif - -#if !defined(JSONCPP_DEPRECATED) -# define JSONCPP_DEPRECATED(message) -#endif // if !defined(JSONCPP_DEPRECATED) - -namespace Json { - typedef int Int; - typedef unsigned int UInt; -# if defined(JSON_NO_INT64) - typedef int LargestInt; - typedef unsigned int LargestUInt; -# undef JSON_HAS_INT64 -# else // if defined(JSON_NO_INT64) - // For Microsoft Visual use specific types as long long is not supported -# if defined(_MSC_VER) // Microsoft Visual Studio - typedef __int64 Int64; - typedef unsigned __int64 UInt64; -# else // if defined(_MSC_VER) // Other platforms, use long long - typedef long long int Int64; - typedef unsigned long long int UInt64; -# endif // if defined(_MSC_VER) - typedef Int64 LargestInt; - typedef UInt64 LargestUInt; -# define JSON_HAS_INT64 -# endif // if defined(JSON_NO_INT64) -} // end namespace Json - - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/features.h b/tags/jsoncpp/0.6.0-rc1/include/json/features.h deleted file mode 100644 index 0b53db1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/features.h +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. 
- * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h deleted file mode 100644 index 083d44f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "config.h" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef unsigned int ArrayIndex; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/json.h b/tags/jsoncpp/0.6.0-rc1/include/json/json.h deleted file mode 100644 index da5fc96..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/json.h +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/reader.h b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h deleted file mode 100644 index 5e4c32a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/reader.h +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "features.h" -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. 
- * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. - * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. - \ Must be >= beginDoc. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. - * \deprecated Use getFormattedErrorMessages() instead (typo fix). - */ - JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") - std::string getFormatedErrorMessages() const; - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
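[Illustrative aside, not part of the patch: a minimal sketch of using the Reader interface removed in this hunk with Features::strictMode() and error reporting. The function name parseStrict and the input string doc are made up for the example.]
\code
#include <json/json.h>
#include <iostream>
#include <string>

// Hypothetical helper: parse `doc` in strict mode and report errors.
bool parseStrict( const std::string &doc, Json::Value &root )
{
   // Comments disabled, root must be an array or an object.
   Json::Features features = Json::Features::strictMode();
   Json::Reader reader( features );
   if ( !reader.parse( doc, root, false /* collectComments */ ) )
   {
      // One formatted message per error, with its location in the document.
      std::cerr << reader.getFormattedErrorMessages();
      return false;
   }
   return true;
}
\endcode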
- */ - std::string getFormattedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/value.h b/tags/jsoncpp/0.6.0-rc1/include/json/value.h deleted file mode 100644 index 66821ab..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/value.h +++ /dev/null @@ -1,1103 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. - */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
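[Illustrative aside on the Value interface deleted above: a short sketch of building and querying a value. The member names (threads, verbose, paths, user) are invented for the example.]
\code
#include <json/json.h>
#include <string>

// Hypothetical helper: build a small settings object.
Json::Value makeSettings()
{
   Json::Value settings( Json::objectValue );
   settings["threads"] = 4;      // implicit Int constructor
   settings["verbose"] = true;   // implicit bool constructor

   Json::Value paths( Json::arrayValue );
   paths.append( "/usr/share" );
   paths.append( "/usr/local/share" );
   settings["paths"] = paths;

   // get() returns the default when the member is missing; the object is unchanged.
   std::string user = settings.get( "user", "anonymous" ).asString();
   (void)user;
   return settings;
}
\endcode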
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; -# if defined(JSON_HAS_INT64) - typedef Json::UInt64 UInt64; - typedef Json::Int64 Int64; -#endif // defined(JSON_HAS_INT64) - typedef Json::LargestInt LargestInt; - typedef Json::LargestUInt LargestUInt; - typedef Json::ArrayIndex ArrayIndex; - - static const Value null; - /// Minimum signed integer value that can be stored in a Json::Value. - static const LargestInt minLargestInt; - /// Maximum signed integer value that can be stored in a Json::Value. - static const LargestInt maxLargestInt; - /// Maximum unsigned integer value that can be stored in a Json::Value. - static const LargestUInt maxLargestUInt; - - /// Minimum signed int value that can be stored in a Json::Value. - static const Int minInt; - /// Maximum signed int value that can be stored in a Json::Value. - static const Int maxInt; - /// Maximum unsigned int value that can be stored in a Json::Value. - static const UInt maxUInt; - - /// Minimum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 minInt64; - /// Maximum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 maxInt64; - /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. - static const UInt64 maxUInt64; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( ArrayIndex index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - ArrayIndex index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - ArrayIndex index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); -#if defined(JSON_HAS_INT64) - Value( Int64 value ); - Value( UInt64 value ); -#endif // if defined(JSON_HAS_INT64) - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. 
The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. - void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ); - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - Int64 asInt64() const; - UInt64 asUInt64() const; - LargestInt asLargestInt() const; - LargestUInt asLargestUInt() const; - float asFloat() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - ArrayIndex size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( ArrayIndex size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( ArrayIndex index ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( int index ); - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( ArrayIndex index ) const; - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) 
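[Illustrative aside on the array-access rules described above (the 'value[0u]' disambiguation and the non-mutating get()). The helper names and default values are assumptions of the example; `arr` is assumed to hold an arrayValue.]
\code
#include <json/json.h>

// Hypothetical helper: first element as double, or 0.0 if absent.
double firstOrZero( const Json::Value &arr )
{
   if ( !arr.isArray() || !arr.isValidIndex( 0u ) )
      return 0.0;
   // 'arr[0u]' (unsigned) picks the array overload, not the const char* one.
   return arr[0u].asDouble();
}

// Hypothetical helper: get( index, default ) never grows the array,
// unlike the non-const operator[].
double secondOrDefault( const Json::Value &arr )
{
   return arr.get( 1u, 42.0 ).asDouble();
}
\endcode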
- const Value &operator[]( int index ) const; - - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( ArrayIndex index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( ArrayIndex index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. - * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... 
*/ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - LargestInt int_; - LargestUInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( ArrayIndex index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - ArrayIndex index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. 
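[Illustrative aside on the member-enumeration and comment APIs declared above; a brief sketch. The helper names dumpMembers/annotate and the comment text are made up, and `root` is assumed to hold an objectValue.]
\code
#include <json/json.h>
#include <iostream>

// Hypothetical helper: print every top-level member of an object value.
void dumpMembers( const Json::Value &root )
{
   Json::Value::Members names = root.getMemberNames();
   for ( Json::Value::Members::const_iterator it = names.begin(); it != names.end(); ++it )
      std::cout << *it << " = " << root[*it].toStyledString();
}

// Hypothetical helper: attach a comment that writers can emit before the value.
void annotate( Json::Value &value )
{
   // Comments must be written as //... or /* ... */ text.
   value.setComment( "// generated by the build script", Json::commentBefore );
}
\endcode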
- Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. - * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Experimental: do not use. Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. 
- * \param indexes [input] pointer on the current index. May be \c NULL. - * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/writer.h b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h deleted file mode 100644 index cb0bd9b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/include/json/writer.h +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -#if !defined(JSON_IS_AMALGATED) -# include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. 
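[Illustrative aside: the iterator classes above can be used directly instead of getMemberNames(). A minimal sketch, assuming `root` holds an objectValue; the helper name printMembers is made up.]
\code
#include <json/json.h>
#include <iostream>

// Hypothetical helper: iterate an object's members without building a name vector.
void printMembers( const Json::Value &root )
{
   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
      std::cout << it.memberName() << ": " << (*it).toStyledString();
}
\endcode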
- * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - -# if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); -# endif // if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( LargestInt value ); - std::string JSON_API valueToString( LargestUInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - 
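For context, FastWriter, StyledWriter and StyledStreamWriter declared in the deleted writer.h differ only in output shape and destination. A sketch of typical use, assuming a Value already built by the caller:

#include <json/json.h>
#include <fstream>
#include <iostream>

void emit( const Json::Value &root, const char *path )
{
   Json::FastWriter fast;                    // single line, e.g. for RPC payloads
   std::cout << fast.write( root );

   Json::StyledWriter styled;                // indented string, keeps comments
   std::cout << styled.write( root );

   std::ofstream out( path );
   Json::StyledStreamWriter writer( "  " );  // same layout rules, writes to a stream
   writer.write( out, root );                // operator<<( out, root ) is equivalent
}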
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ diff --git a/tags/jsoncpp/0.6.0-rc1/makerelease.py b/tags/jsoncpp/0.6.0-rc1/makerelease.py deleted file mode 100644 index a6e330e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/makerelease.py +++ /dev/null @@ -1,380 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs.
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev - -When testing this script: -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball -import amalgate - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned' and path != 'version': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_commit( message ): - """Commit the sandbox, providing the specified comment. - """ - svn_command( 'ci', '-m', message ) - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 2: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - next_version = args[1] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - svn_commit( 'Release ' + release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir - print 'Generating amalgated source tarball to', amalgated_tarball_path - amalgated_dir = 'dist/amalgated' - amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) - amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version - tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], - amalgated_dir, prefix_dir=amalgated_source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - print 'Compiling decompressed tarball' - 
all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Source and doc release tarballs uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - - # Set next version number and commit - set_version( next_version ) - svn_commit( 'Released ' + release_version ) - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp deleted file mode 100644 index dfb6150..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -/* This executable is used for testing parser/writer using real JSON files. - */ - - -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormattedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - - -static void -printConfig() -{ - // Print the configuration used to compile JsonCpp -#if defined(JSON_NO_INT64) - printf( "JSON_NO_INT64=1\n" ); -#else - printf( "JSON_NO_INT64=0\n" ); -#endif -} - - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( std::string(argv[1]) == "--json-config" ) - { - printConfig(); - return 3; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - try - { - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - } - catch ( const std::exception &e ) - { - printf( "Unhandled exception:\n%s\n", e.what() ); - exitCode = 1; - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h deleted file mode 100644 index 173e2ed..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
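The test runner removed above drives a parse/rewrite/re-parse cycle and writes *.actual files for external comparison. A condensed sketch of the same round trip against this tag's Reader and StyledWriter, with the final in-memory comparison added only for illustration:

#include <json/json.h>
#include <string>

bool roundTrips( const std::string &input )
{
   Json::Features features = Json::Features::strictMode();   // as used by --json-checker
   Json::Reader reader( features );
   Json::Value root;
   if ( !reader.parse( input, root ) )
      return false;            // reader.getFormattedErrorMessages() explains why

   Json::StyledWriter writer;
   Json::Value reparsed;
   return Json::Reader( features ).parse( writer.write( root ), reparsed )
          && reparsed == root;
}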
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl deleted file mode 100644 index 3a532ad..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,456 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
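The batch allocator above hands out raw slots and never runs constructors or destructors itself. A usage sketch, assuming the original template parameters (AllocatedType, objectPerAllocation) that this export has stripped from the declarations; the header is internal and only used inside lib_json:

#include <new>                     // placement new
#include <json/value.h>
#include "json_batchallocator.h"   // internal header, not installed

void batchExample()
{
   Json::BatchAllocator<Json::Value, 1> allocator;       // 255 objects per page by default
   Json::Value *slot  = allocator.allocate();            // raw, uninitialized storage
   Json::Value *value = new ( slot ) Json::Value( 42 );  // caller constructs in place...
   value->~Value();                                      // ...and destroys explicitly
   allocator.release( slot );                            // slot returns to the free list
}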
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - 
virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). - } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - 
"ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl deleted file mode 100644 index f2fa160..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
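ValueInternalArray, removed above, stores elements in fixed-size pages and keeps a separate index of page pointers. The sketch below restates the index-to-page mapping from resolveReference()/find() and the growth rule from reallocateArrayPageIndex(), using an illustrative page size (the real itemsPerPage constant lives in the deleted value.h):

enum { kItemsPerPage = 8 };   // illustrative stand-in for ValueInternalArray::itemsPerPage

// index -> (page, offset), as in pages_[index/itemsPerPage][index%itemsPerPage]
void locate( unsigned index, unsigned &page, unsigned &offset )
{
   page   = index / kItemsPerPage;
   offset = index % kItemsPerPage;
}

// Growth policy of the page-pointer index: 1.5x + 1, but never below the
// caller's minimum page count.
unsigned growPageIndex( unsigned indexCount, unsigned minNewIndexCount )
{
   unsigned newIndexCount = ( indexCount * 3 ) / 2 + 1;
   return newIndexCount < minNewIndexCount ? minNewIndexCount : newIndexCount;
}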
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124)
-values have extra state: valid, available, deleted
-*/
-
-
-ValueInternalMap::ValueInternalMap()
-   : buckets_( 0 )
-   , tailLink_( 0 )
-   , bucketsSize_( 0 )
-   , itemCount_( 0 )
-{
-}
-
-
-ValueInternalMap::ValueInternalMap( const ValueInternalMap &other )
-   : buckets_( 0 )
-   , tailLink_( 0 )
-   , bucketsSize_( 0 )
-   , itemCount_( 0 )
-{
-   reserve( other.itemCount_ );
-   IteratorState it;
-   IteratorState itEnd;
-   other.makeBeginIterator( it );
-   other.makeEndIterator( itEnd );
-   for ( ; !equals(it,itEnd); increment(it) )
-   {
-      bool isStatic;
-      const char *memberName = key( it, isStatic );
-      const Value &aValue = value( it );
-      resolveReference(memberName, isStatic) = aValue;
-   }
-}
-
-
-ValueInternalMap &
-ValueInternalMap::operator =( const ValueInternalMap &other )
-{
-   ValueInternalMap dummy( other );
-   swap( dummy );
-   return *this;
-}
-
-
-ValueInternalMap::~ValueInternalMap()
-{
-   if ( buckets_ )
-   {
-      for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex )
-      {
-         ValueInternalLink *link = buckets_[bucketIndex].next_;
-         while ( link )
-         {
-            ValueInternalLink *linkToRelease = link;
-            link = link->next_;
-            mapAllocator()->releaseMapLink( linkToRelease );
-         }
-      }
-      mapAllocator()->releaseMapBuckets( buckets_ );
-   }
-}
-
-
-void
-ValueInternalMap::swap( ValueInternalMap &other )
-{
-   ValueInternalLink *tempBuckets = buckets_;
-   buckets_ = other.buckets_;
-   other.buckets_ = tempBuckets;
-   ValueInternalLink *tempTailLink = tailLink_;
-   tailLink_ = other.tailLink_;
-   other.tailLink_ = tempTailLink;
-   BucketIndex tempBucketsSize = bucketsSize_;
-   bucketsSize_ = other.bucketsSize_;
-   other.bucketsSize_ = tempBucketsSize;
-   BucketIndex tempItemCount = itemCount_;
-   itemCount_ = other.itemCount_;
-   other.itemCount_ = tempItemCount;
-}
-
-
-void
-ValueInternalMap::clear()
-{
-   ValueInternalMap dummy;
-   swap( dummy );
-}
-
-
-ValueInternalMap::BucketIndex
-ValueInternalMap::size() const
-{
-   return itemCount_;
-}
-
-bool
-ValueInternalMap::reserveDelta( BucketIndex growth )
-{
-   return reserve( itemCount_ + growth );
-}
-
-bool
-ValueInternalMap::reserve( BucketIndex newItemCount )
-{
-   if ( !buckets_ && newItemCount > 0 )
-   {
-      buckets_ = mapAllocator()->allocateMapBuckets( 1 );
-      bucketsSize_ = 1;
-      tailLink_ = &buckets_[0];
-   }
-// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink;
-   return true;
-}
-
-
-const Value *
-ValueInternalMap::find( const char *key ) const
-{
-   if ( !bucketsSize_ )
-      return 0;
-   HashKey hashedKey = hash( key );
-   BucketIndex bucketIndex = hashedKey % bucketsSize_;
-   for ( const ValueInternalLink *current = &buckets_[bucketIndex];
-         current != 0;
-         current = current->next_ )
-   {
-      for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index )
-      {
-         if ( current->items_[index].isItemAvailable() )
-            return 0;
-         if ( strcmp( key, current->keys_[index] ) == 0 )
-            return &current->items_[index];
-      }
-   }
-   return 0;
-}
-
-
-Value *
-ValueInternalMap::find( const char *key )
-{
-   const ValueInternalMap *constThis = this;
-   return const_cast<Value *>( constThis->find( key ) );
-}
-
-
-Value &
-ValueInternalMap::resolveReference( const char *key,
-                                    bool isStatic )
-{
-   HashKey hashedKey = hash( key );
-   if ( bucketsSize_ )
-   {
-      BucketIndex bucketIndex = hashedKey % bucketsSize_;
-      ValueInternalLink **previous = 0;
-      BucketIndex index;
-      for ( ValueInternalLink *current = &buckets_[bucketIndex];
-            current != 0;
-            previous = &current->next_, current = current->next_ )
-      {
-
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp deleted file mode 100644 index 7c594e2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,880 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - -Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. 
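   // Illustrative usage sketch for this std::istream overload (added commentary, not
   // text from the original file; the file name "config.json" is hypothetical):
   //
   //    #include <json/json.h>
   //    #include <fstream>
   //    #include <iostream>
   //
   //    std::ifstream in( "config.json" );
   //    Json::Value root;
   //    Json::Reader reader;
   //    if ( !reader.parse( in, root, true /* collectComments */ ) )
   //       std::cerr << reader.getFormattedErrorMessages();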
- std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = 
false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &tokenStart ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd ); 
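      // Reading of the comment-collection logic in readComment() above (added
      // illustration, not original text): when collectComments is enabled, a comment
      // that starts on the same line as the value it follows is attached to that value
      // as commentAfterOnSameLine, while a comment on a line of its own is buffered in
      // commentsBefore_ and attached to the next value as commentBefore. For example:
      //
      //    {
      //       "x" : 1,   // attached to the value 1 as commentAfterOnSameLine
      //       // attached to the value of "y" as commentBefore
      //       "y" : 2
      //    }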
- } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &tokenStart ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - while ( true ) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ != tokenArraySeparator && - token.type_ != tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - // Attempts to parse the number as an integer. If the number is - // larger than the maximum supported value of an integer then - // we decode the number as a double. - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) - : Value::maxLargestUInt; - Value::LargestUInt threshold = maxIntegerValue / 10; - Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); - assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); - Value::LargestUInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - Value::UInt digit(c - '0'); - if ( value >= threshold ) - { - // If the current digit is not the last one, or if it is - // greater than the last digit of the maximum integer value, - // the parse the number as a double. 
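         // Worked example of the threshold check in decodeNumber() here (added
         // illustration, assuming the usual configuration where LargestUInt is a
         // 64-bit type):
         //
         //    maxLargestUInt     = 18446744073709551615   (2^64 - 1)
         //    threshold          = 1844674407370955161    (maxIntegerValue / 10)
         //    lastDigitThreshold = 5                      (maxIntegerValue % 10)
         //
         // While value < threshold, appending any digit cannot overflow. Once value
         // has reached threshold, a further digit is accepted only if it is the final
         // one and no greater than lastDigitThreshold; otherwise the token is
         // re-parsed as a double. For a negative literal, maxIntegerValue is
         // 9223372036854775808 (that is, -minLargestInt), giving a threshold of
         // 922337203685477580 and a last-digit threshold of 8.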
- if ( current != token.end_ || digit > lastDigitThreshold ) - { - return decodeDouble( token ); - } - } - value = value * 10 + digit; - } - if ( isNegative ) - currentValue() = -Value::LargestInt( value ); - else if ( value <= Value::LargestUInt(Value::maxInt) ) - currentValue() = Value::LargestInt( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize+1]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && 
c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - while ( true ) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -// Deprecated. Preserved for backward compatibility -std::string -Reader::getFormatedErrorMessages() const -{ - return getFormattedErrorMessages(); -} - - -std::string -Reader::getFormattedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h deleted file mode 100644 index 658031b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
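// Worked example for the surrogate-pair combination in Reader::decodeUnicodeCodePoint()
// above (added illustration, not part of the original sources), using the standard
// test code point U+1D11E MUSICAL SYMBOL G CLEF:
//
//    input escape:  "\uD834\uDD1E"
//    unicode = 0xD834, surrogatePair = 0xDD1E
//    0x10000 + ((0xD834 & 0x3FF) << 10) + (0xDD1E & 0x3FF)
//       = 0x10000 + 0xD000 + 0x11E
//       = 0x1D11E
//
// codePointToUTF8( 0x1D11E ), defined below in this header, then produces the
// four-byte sequence F0 9D 84 9E.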
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED -# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED - -/* This header provides common string manipulation support, such as UTF-8, - * portable conversion from/to string... - * - * It is an internal header that must not be exposed. - */ - -namespace Json { - -/// Converts a unicode code-point to UTF-8. -static inline std::string -codePointToUTF8(unsigned int cp) -{ - std::string result; - - // based on description from http://en.wikipedia.org/wiki/UTF-8 - - if (cp <= 0x7f) - { - result.resize(1); - result[0] = static_cast(cp); - } - else if (cp <= 0x7FF) - { - result.resize(2); - result[1] = static_cast(0x80 | (0x3f & cp)); - result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); - } - else if (cp <= 0xFFFF) - { - result.resize(3); - result[2] = static_cast(0x80 | (0x3f & cp)); - result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); - result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); - } - else if (cp <= 0x10FFFF) - { - result.resize(4); - result[3] = static_cast(0x80 | (0x3f & cp)); - result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); - result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); - result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); - } - - return result; -} - - -/// Returns true if ch is a control character (in range [0,32[). -static inline bool -isControlCharacter(char ch) -{ - return ch > 0 && ch <= 0x1F; -} - - -enum { - /// Constant that specify the size of the buffer that must be passed to uintToString. - uintToStringBufferSize = 3*sizeof(LargestUInt)+1 -}; - -// Defines a char buffer for use with uintToString(). -typedef char UIntToStringBuffer[uintToStringBufferSize]; - - -/** Converts an unsigned integer to string. - * @param value Unsigned interger to convert to string - * @param current Input/Output string buffer. - * Must have at least uintToStringBufferSize chars free. - */ -static inline void -uintToString( LargestUInt value, - char *¤t ) -{ - *--current = 0; - do - { - *--current = char(value % 10) + '0'; - value /= 10; - } - while ( value != 0 ); -} - -} // namespace Json { - -#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp deleted file mode 100644 index c810417..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp +++ /dev/null @@ -1,1847 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include -# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); -const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); -const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); -const UInt64 Value::maxUInt64 = UInt64(-1); -const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); -const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); -const LargestUInt Value::maxLargestUInt = LargestUInt(-1); - - -/// Unknown size marker -enum { unknown = (unsigned)-1 }; - - -/** Duplicates the specified string value. - * @param value Pointer to the string to duplicate. Must be zero-terminated if - * length is "unknown". - * @param length Length of the value. if equals to unknown, then it will be - * computed using strlen(value). - * @return Pointer on the duplicate instance of string. - */ -static inline char * -duplicateStringValue( const char *value, - unsigned int length = unknown ) -{ - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; -} - - -/** Free the string duplicated by duplicateStringValue(). - */ -static inline void -releaseStringValue( char *value ) -{ - if ( value ) - free( value ); -} - -} // namespace Json - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
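// Illustrative expansion of the integer limits defined above (added commentary,
// assuming the common case of a 32-bit Int/UInt and a 64-bit Int64/UInt64):
//
//    UInt(-1)        = 0xFFFFFFFF = 4294967295          -> maxUInt
//    UInt(-1)/2      = 0x7FFFFFFF = 2147483647          -> maxInt
//    ~(UInt(-1)/2)   = 0x80000000 = -2147483648 as Int  -> minInt
//
// The same pattern applied to UInt64 gives maxUInt64 = 2^64 - 1, maxInt64 = 2^63 - 1
// and minInt64 = -2^63; the Largest* constants take whichever width the build is
// configured to use.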
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#if !defined(JSON_IS_AMALGATED) -# ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -# endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" -#endif // if !defined(JSON_IS_AMALGATED) - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - releaseStringValue( comment_ ); - JSON_ASSERT( text != 0 ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( ArrayIndex index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? duplicateStringValue(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? duplicateStringValue( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - releaseStringValue( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -ArrayIndex -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -#if defined(JSON_HAS_INT64) -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - -#endif // if defined(JSON_HAS_INT64) - - -Value::Value( Int64 value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt64 value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = 
duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( beginValue, - (unsigned int)(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const 
Value &other ) -{ - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable -} - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other > *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); - return Int(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); - return Int(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); - return UInt(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); - return UInt(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -# if defined(JSON_HAS_INT64) - -Value::Int64 -Value::asInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt64 -Value::asUInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} -# endif // if defined(JSON_HAS_INT64) - - -LargestInt -Value::asLargestInt() const -{ -#if defined(JSON_NO_INT64) - return asInt(); -#else - return asInt64(); -#endif -} - - -LargestUInt -Value::asLargestUInt() const -{ -#if defined(JSON_NO_INT64) - return asUInt(); -#else - return asUInt64(); -#endif -} - - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -float -Value::asFloat() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0f; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return static_cast( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1.0f : 0.0f; - case stringValue: - case arrayValue: - case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0.0f; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -ArrayIndex -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return ArrayIndex( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( ArrayIndex 
newSize ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ArrayIndex oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( ArrayIndex index = newSize; index < oldSize; ++index ) - { - value_.map_->erase( index ); - } - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( ArrayIndex index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -Value & -Value::operator[]( int index ) -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -const Value & -Value::operator[]( ArrayIndex index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? *value : null; -#endif -} - - -const Value & -Value::operator[]( int index ) const -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( ArrayIndex index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( ArrayIndex index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? 
*value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# 
endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void -Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( 
value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( ArrayIndex index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - - -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - ArrayIndex index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + ArrayIndex(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 7457ca3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( 
isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp deleted file mode 100644 index 8c4c180..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,838 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGATED) -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} - - -std::string valueToString( LargestInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( LargestUInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( LargestUInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -#if defined(JSON_HAS_INT64) - -std::string valueToString( Int value ) -{ - return valueToString( LargestInt(value) ); -} - - -std::string valueToString( UInt value ) -{ - return valueToString( LargestUInt(value) ); -} - -#endif // # if defined(JSON_HAS_INT64) - - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asLargestInt() ); - break; - case uintValue: - document_ += valueToString( value.asLargestUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - 
lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - while ( true ) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - while ( true ) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp deleted file mode 100644 index 02e7b21..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,608 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h deleted file mode 100644 index 0d07238..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). 
- PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. - TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. 
- int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test cases in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque<TestCaseFactory> Factories; - Factories tests_; - }; - - template<typename T> - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. -/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equal. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two string values are equal. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case.
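-/// Illustrative sketch only (the real fixtures are in main.cpp below):
-///    JSONTEST_FIXTURE( ValueTest, size ) { JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); }
-/// Such a fixture is then registered with JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ) before runner.runCommandLine() is invoked.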
-#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overridden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp deleted file mode 100644 index de64200..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp +++ /dev/null @@ -1,271 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#include -#include "jsontest.h" - - -// TODO: -// - boolean values return that they are integral. Should not be. -// - unsigned integers in integer range are not considered to be valid integers. Should check range. - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value float_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , float_( 0.00390625f ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default.
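- /// Each checkIs() test below sets only the flags it expects to be true, so every Json::Value::is*() predicate is checked against an explicit expectation on each call.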
- IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, accessArray ) -{ - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; - - const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; -} - - -JSONTEST_FIXTURE( ValueTest, asFloat ) -{ - JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; -} - -void -ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. 
-env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/test/cleantests.py b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json deleted file mode 100644 index 900fcc2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected +++ /dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 
-.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 -.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json +++ /dev/null @@ -1,4 +0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected deleted file 
mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json deleted file 
mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." -.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected deleted file mode 100644 index bc9520a1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json deleted file mode 100644 index 360d660..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -9223372036854775808 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected deleted file mode 100644 index 39eb798..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json deleted file mode 100644 index 11d8513..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ --9223372036854775808 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected deleted file mode 100644 index 831f432..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json deleted file mode 100644 index 6e1fb04..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -18446744073709551615 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] 
-.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 -.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 -.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 
-.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 -.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 -.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 
-.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 -.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 -.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 
-.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 -.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 -.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 
-.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 -.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 -.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 
-.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 -.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 -.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 
-.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 -.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 -.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 
-.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 -.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 -.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 
-.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 -.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 -.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 
-.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 -.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ 
-[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected deleted file mode 100644 index 6ed627a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json deleted file mode 100644 index f0a220f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json +++ /dev/null @@ -1 +0,0 @@ -"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json deleted file mode 100644 index e7e1a9e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- 
a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - 
"digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' 
or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? - file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py deleted file mode 100644 index ffe8bd5..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '<File "%s" is missing: %s>' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - is_json_checker_test = (input_path in test_jsonchecker) or expect_failure - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) -
process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] <path to jsontestrunner.exe> [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.'
) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/test/rununittests.py b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.6.0-rc1/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.'
) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc1/version b/tags/jsoncpp/0.6.0-rc1/version deleted file mode 100644 index 8d1042e..0000000 --- a/tags/jsoncpp/0.6.0-rc1/version +++ /dev/null @@ -1 +0,0 @@ -0.6.0-rc1 \ No newline at end of file From 37751771f4d36ce4fce4a71b69be2688c04a8dc4 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 07:10:38 +0000 Subject: [PATCH 180/268] Release 0.6.0-rc1 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@180 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc1/AUTHORS | 1 + tags/jsoncpp/0.6.0-rc1/LICENSE | 55 + tags/jsoncpp/0.6.0-rc1/NEWS.txt | 95 + tags/jsoncpp/0.6.0-rc1/README.txt | 172 ++ tags/jsoncpp/0.6.0-rc1/SConstruct | 248 ++ tags/jsoncpp/0.6.0-rc1/amalgate.py | 147 ++ tags/jsoncpp/0.6.0-rc1/devtools/__init__.py | 1 + tags/jsoncpp/0.6.0-rc1/devtools/antglob.py | 201 ++ tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py | 63 + .../0.6.0-rc1/devtools/licenseupdater.py | 93 + tags/jsoncpp/0.6.0-rc1/devtools/tarball.py | 53 + tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.6.0-rc1/doc/footer.html | 23 + tags/jsoncpp/0.6.0-rc1/doc/header.html | 24 + tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox | 126 + tags/jsoncpp/0.6.0-rc1/doc/readme.txt | 1 + tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox | 37 + tags/jsoncpp/0.6.0-rc1/doxybuild.py | 169 ++ .../jsoncpp/0.6.0-rc1/include/json/autolink.h | 24 + tags/jsoncpp/0.6.0-rc1/include/json/config.h | 96 + .../jsoncpp/0.6.0-rc1/include/json/features.h | 49 + .../jsoncpp/0.6.0-rc1/include/json/forwards.h | 44 + tags/jsoncpp/0.6.0-rc1/include/json/json.h | 15 + tags/jsoncpp/0.6.0-rc1/include/json/reader.h | 214 ++ tags/jsoncpp/0.6.0-rc1/include/json/value.h | 1103 +++++++++ tags/jsoncpp/0.6.0-rc1/include/json/writer.h | 185 ++ .../0.6.0-rc1/makefiles/vs71/jsoncpp.sln | 46 + .../0.6.0-rc1/makefiles/vs71/jsontest.vcproj | 119 + .../0.6.0-rc1/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.6.0-rc1/makerelease.py | 380 +++ .../jsoncpp/0.6.0-rc1/scons-tools/globtool.py | 53 + tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py | 179 ++ .../0.6.0-rc1/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py | 82 + .../0.6.0-rc1/src/jsontestrunner/main.cpp | 269 +++ .../0.6.0-rc1/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../0.6.0-rc1/src/lib_json/json_reader.cpp | 880 +++++++ .../0.6.0-rc1/src/lib_json/json_tool.h | 93 + .../0.6.0-rc1/src/lib_json/json_value.cpp | 1847 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../0.6.0-rc1/src/lib_json/json_writer.cpp | 838 +++++++ .../jsoncpp/0.6.0-rc1/src/lib_json/sconscript | 8 + .../0.6.0-rc1/src/test_lib_json/jsontest.cpp | 608 +++++ .../0.6.0-rc1/src/test_lib_json/jsontest.h | 259 ++ .../0.6.0-rc1/src/test_lib_json/main.cpp | 271 +++ .../0.6.0-rc1/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.6.0-rc1/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../0.6.0-rc1/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../0.6.0-rc1/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../0.6.0-rc1/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + 
.../0.6.0-rc1/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../0.6.0-rc1/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../0.6.0-rc1/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../0.6.0-rc1/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../0.6.0-rc1/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../0.6.0-rc1/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../0.6.0-rc1/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../0.6.0-rc1/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../0.6.0-rc1/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../0.6.0-rc1/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../0.6.0-rc1/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../0.6.0-rc1/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../0.6.0-rc1/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../0.6.0-rc1/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../0.6.0-rc1/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../0.6.0-rc1/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../0.6.0-rc1/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../0.6.0-rc1/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../0.6.0-rc1/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.6.0-rc1/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../0.6.0-rc1/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../0.6.0-rc1/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../0.6.0-rc1/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../0.6.0-rc1/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.6.0-rc1/test/data/test_real_01.expected | 2 + .../0.6.0-rc1/test/data/test_real_01.json | 3 + .../0.6.0-rc1/test/data/test_real_02.expected | 2 + .../0.6.0-rc1/test/data/test_real_02.json | 3 + .../0.6.0-rc1/test/data/test_real_03.expected | 2 + .../0.6.0-rc1/test/data/test_real_03.json | 3 + .../0.6.0-rc1/test/data/test_real_04.expected | 2 + .../0.6.0-rc1/test/data/test_real_04.json | 3 + .../0.6.0-rc1/test/data/test_real_05.expected | 3 + .../0.6.0-rc1/test/data/test_real_05.json | 3 + .../0.6.0-rc1/test/data/test_real_06.expected | 3 + .../0.6.0-rc1/test/data/test_real_06.json | 3 + .../0.6.0-rc1/test/data/test_real_07.expected | 3 + .../0.6.0-rc1/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../0.6.0-rc1/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../0.6.0-rc1/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 
+ .../0.6.0-rc1/test/data/test_string_03.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../0.6.0-rc1/test/generate_expected.py | 11 + .../0.6.0-rc1/test/jsonchecker/fail1.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail10.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail11.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail12.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail13.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail14.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail15.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail16.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail17.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail18.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail19.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail2.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail20.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail21.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail22.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail23.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail24.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail25.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail26.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail27.json | 2 + .../0.6.0-rc1/test/jsonchecker/fail28.json | 2 + .../0.6.0-rc1/test/jsonchecker/fail29.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail3.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail30.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail31.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail32.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail33.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail4.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail5.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail6.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail7.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail8.json | 1 + .../0.6.0-rc1/test/jsonchecker/fail9.json | 1 + .../0.6.0-rc1/test/jsonchecker/pass1.json | 58 + .../0.6.0-rc1/test/jsonchecker/pass2.json | 1 + .../0.6.0-rc1/test/jsonchecker/pass3.json | 6 + .../0.6.0-rc1/test/jsonchecker/readme.txt | 3 + .../0.6.0-rc1/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.6.0-rc1/test/runjsontests.py | 134 ++ tags/jsoncpp/0.6.0-rc1/test/rununittests.py | 73 + tags/jsoncpp/0.6.0-rc1/version | 1 + 186 files changed, 15504 insertions(+) create mode 100644 tags/jsoncpp/0.6.0-rc1/AUTHORS create mode 100644 tags/jsoncpp/0.6.0-rc1/LICENSE create mode 100644 tags/jsoncpp/0.6.0-rc1/NEWS.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/README.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/SConstruct create mode 100644 tags/jsoncpp/0.6.0-rc1/amalgate.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/__init__.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/antglob.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/0.6.0-rc1/devtools/tarball.py create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/footer.html create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/header.html create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.6.0-rc1/doc/readme.txt create mode 
100644 tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.6.0-rc1/doxybuild.py create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/autolink.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/config.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/features.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/forwards.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/json.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/reader.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/value.h create mode 100644 tags/jsoncpp/0.6.0-rc1/include/json/writer.h create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc1/makerelease.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc1/test/cleantests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected create mode 
100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/generate_expected.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json create mode 100644 
tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/runjsontests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/test/rununittests.py create mode 100644 tags/jsoncpp/0.6.0-rc1/version diff --git a/tags/jsoncpp/0.6.0-rc1/AUTHORS b/tags/jsoncpp/0.6.0-rc1/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc1/LICENSE b/tags/jsoncpp/0.6.0-rc1/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. 
+ +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/0.6.0-rc1/NEWS.txt b/tags/jsoncpp/0.6.0-rc1/NEWS.txt new file mode 100644 index 0000000..7978c0a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/NEWS.txt @@ -0,0 +1,95 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment, as this is required for some + compiler installations. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Notes: you need to set up the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + - Added support for amalgated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgated source and header" + for details. + +* Value + + - Removed the experimental ValueAllocator; it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in the code. + + - Added support for 64-bit integers: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64-bit integers on systems that support them (based on __int64 on + the Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still return plain + "int"-based types, but assert if an attempt is made to retrieve + a 64-bit value that cannot be represented as the return type.
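For illustration only, here is a minimal sketch (not part of the NEWS.txt above; it assumes the 0.6.0 json/json.h header) of the asserting asInt() behaviour and the 64-bit accessors described in this section:

    #include <json/json.h>
    #include <iostream>

    int main()
    {
       // Store a value outside the 32-bit range.
       Json::Value big( Json::Int64( 5000000000LL ) );
       // big.asInt() would assert here: the value cannot be represented as an int.
       Json::Int64 exact = big.asInt64();             // exact 64-bit value
       Json::LargestInt widest = big.asLargestInt();  // widest supported integer type
       std::cout << exact << " " << widest << std::endl;
       return 0;
    }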
+ + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64-bit integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() return + the integer as a LargestInt/LargestUInt respectively. Those functions + are typically used when implementing a writer. + + The reader attempts to read numbers as 64-bit integers, and falls back + to reading a double if the number is not in the range of a 64-bit + integer. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now return + long long. This change breaks code that was passing the return value + to a *printf() function. + + Support for 64-bit integers can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). + + - An array index can be passed as an int to operator[], allowing the use of literals: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoids the loss-of-precision warning caused by the use of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added a test to ensure that the escape sequence "\/" is correctly handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. The error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under the + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/], who + helped figure out the solution to the public domain issue. diff --git a/tags/jsoncpp/0.6.0-rc1/README.txt b/tags/jsoncpp/0.6.0-rc1/README.txt new file mode 100644 index 0000000..ba70329 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/README.txt @@ -0,0 +1,172 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of +values, and a collection of name/value pairs. + +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON values and handle serialization to and from strings. + +It can also preserve existing comments across unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user-friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +python to be installed (http://www.python.org). + +You can download the scons-local distribution from the following URL: +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ + +Unzip it in the directory where you found this README file. scons.py should be +at the same level as README.
+ +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +set up the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding a platform is fairly simple. You need to change the SConstruct file +to do so. + +and TARGET may be: + check: build library and run unit tests. + + +* Running the tests manually: + ========================== + +Note that tests can be run by scons using the 'check' target (see above). + +You need to run tests manually only if you are troubleshooting an issue. + +In the instructions below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using the JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + +Note that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + +* Generating amalgated source and header + ====================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgated source can be generated at any time by running the following +command from the top directory (requires python 2.6): + +python amalgate.py + +It is possible to specify the header name. See the -h option for details. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that needs to be added to your project +- dist/json/json.h: header file to include in your project. It is +equivalent to including json/json.h in a non-amalgated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header that provides forward declarations +of all JsonCpp types. This is typically what should be included in headers to +speed up compilation. + +The amalgated sources are generated by concatenating the JsonCpp sources in the +correct order and defining the macro JSON_IS_AMALGATED to prevent inclusion of +other headers. + +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path. jsoncpp headers +should be included as follows: + +#include <json/json.h>
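For illustration only, a minimal sketch of typical usage with the Reader, Value and StyledWriter classes referenced in this README (the sample document, key names and error handling below are assumptions, not part of the original text):

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main()
    {
       const std::string doc = "{ \"name\": \"json\", \"count\": 3 }";  // made-up sample input

       Json::Value root;      // will contain the parsed document
       Json::Reader reader;
       if ( !reader.parse( doc, root ) )
       {
          // report the parse failure and its location to the user
          std::cout << reader.getFormattedErrorMessages();
          return 1;
       }

       std::cout << root["name"].asString() << " " << root["count"].asInt() << std::endl;

       Json::StyledWriter writer;   // serialize the Value back to a styled JSON string
       std::cout << writer.write( root );
       return 0;
    }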
+ + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flattened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represents the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element paths. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated alongside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was correct. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWriter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +- test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing errors. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under the +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/0.6.0-rc1/SConstruct b/tags/jsoncpp/0.6.0-rc1/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available.
+ import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. 
+ if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + 
target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.6.0-rc1/amalgate.py b/tags/jsoncpp/0.6.0-rc1/amalgate.py new file mode 100644 index 0000000..502971c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/amalgate.py @@ -0,0 +1,147 @@ +"""Amalgate json-cpp library sources into a single source and header file. + +Requires Python 2.6 + +Example of invocation (must be invoked from json-cpp top directory): +python amalgate.py +""" +import os +import os.path +import sys + +class AmalagatedFile: + def __init__( self, top_dir ): + self.top_dir = top_dir + self.blocks = [] + + def add_text( self, text ): + if not text.endswith( '\n' ): + text += '\n' + self.blocks.append( text ) + + def add_file( self, relative_input_path, wrap_in_comment=False ): + def add_marker( prefix ): + self.add_text( '' ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '' ) + add_marker( 'Beginning' ) + f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) + content = f.read() + if wrap_in_comment: + content = '/*\n' + content + '\n*/' + self.add_text( content ) + f.close() + add_marker( 'End' ) + self.add_text( '\n\n\n\n' ) + + def get_value( self ): + return ''.join( self.blocks ).replace('\r\n','\n') + + def write_to( self, output_path ): + output_dir = os.path.dirname( output_path ) + if output_dir and not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + f = open( output_path, 'wb' ) + f.write( self.get_value() ) + f.close() + +def amalgate_source( source_top_dir=None, + target_source_path=None, + header_include_path=None ): + """Produces amalgated source. + Parameters: + source_top_dir: top-directory + target_source_path: output .cpp path + header_include_path: generated header path relative to target_source_path. + """ + print 'Amalgating header...' 
+ header = AmalagatedFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_file( 'include/json/features.h' ) + header.add_file( 'include/json/value.h' ) + header.add_file( 'include/json/reader.h' ) + header.add_file( 'include/json/writer.h' ) + header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) + + target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) + print 'Writing amalgated header to %r' % target_header_path + header.write_to( target_header_path ) + + base, ext = os.path.splitext( header_include_path ) + forward_header_include_path = base + '-forwards' + ext + print 'Amalgating forward header...' + header = AmalagatedFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) + header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + + target_forward_header_path = os.path.join( os.path.dirname(target_source_path), + forward_header_include_path ) + print 'Writing amalgated forward header to %r' % target_forward_header_path + header.write_to( target_forward_header_path ) + + print 'Amalgating source...' + source = AmalagatedFile( source_top_dir ) + source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' ) + source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + source.add_file( 'LICENSE', wrap_in_comment=True ) + source.add_text( '' ) + source.add_text( '#include <%s>' % header_include_path ) + source.add_text( '' ) + source.add_file( 'src/lib_json\json_tool.h' ) + source.add_file( 'src/lib_json\json_reader.cpp' ) + source.add_file( 'src/lib_json\json_batchallocator.h' ) + source.add_file( 'src/lib_json\json_valueiterator.inl' ) + source.add_file( 'src/lib_json\json_value.cpp' ) + source.add_file( 'src/lib_json\json_writer.cpp' ) + + print 'Writing amalgated source to %r' % target_source_path + source.write_to( target_source_path ) + +def main(): + usage = """%prog [options] +Generate a single amalgated source and header file from the sources. 
+""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp', + help="""Output .cpp source path. [Default: %default]""") + parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h', + help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") + parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(), + help="""Source top-directory. [Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + msg = amalgate_source( source_top_dir=options.top_dir, + target_source_path=options.target_source_path, + header_include_path=options.header_include_path ) + if msg: + sys.stderr.write( msg + '\n' ) + sys.exit( 1 ) + else: + print 'Source succesfully amalagated' + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', 
+## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc1/doc/footer.html b/tags/jsoncpp/0.6.0-rc1/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.6.0-rc1/doc/header.html b/tags/jsoncpp/0.6.0-rc1/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox
new file mode 100644
index 0000000..97cc108
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doc/jsoncpp.dox
@@ -0,0 +1,126 @@
+/**
+\mainpage
+\section _intro Introduction
+
+JSON (JavaScript Object Notation)
+ is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of values, and
+a collection of name/value pairs.
+
+Here is an example of JSON data:
+\verbatim
+// Configuration options
+{
+   // Default encoding for text
+   "encoding" : "UTF-8",
+
+   // Plug-ins loaded at start-up
+   "plug-ins" : [
+      "python",
+      "c++",
+      "ruby"
+      ],
+
+   // Tab indent size
+   "indent" : { "length" : 3, "use_space": true }
+}
+\endverbatim
+
+\section _features Features
+- read and write JSON documents
+- attach C and C++ style comments to elements during parsing
+- rewrite JSON documents preserving the original comments
+
+Notes: Comments used to be supported in JSON but were removed for
+portability (C-like comments are not supported in Python). Since
+comments are useful in configuration/input files, this feature was
+preserved.
+
+\section _example Code example
+
+\code
+Json::Value root;   // will contain the root value after parsing.
+Json::Reader reader;
+bool parsingSuccessful = reader.parse( config_doc, root );
+if ( !parsingSuccessful )
+{
+    // report to the user the failure and its location in the document.
+    std::cout  << "Failed to parse configuration\n"
+               << reader.getFormattedErrorMessages();
+    return;
+}
+
+// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
+// such member.
+std::string encoding = root.get("encoding", "UTF-8" ).asString();
+// Get the value of the member of root named 'plug-ins', return a 'null' value if
+// there is no such member.
+const Json::Value plugins = root["plug-ins"];
+for ( int index = 0; index < plugins.size(); ++index )  // Iterates over the sequence elements.
+   loadPlugIn( plugins[index].asString() );
+
+setIndentLength( root["indent"].get("length", 3).asInt() );
+setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
+
+// ...
+// At application shutdown, to make the new configuration document:
+// Since Json::Value has implicit constructors for all value types, it is not
+// necessary to explicitly construct the Json::Value object:
+root["encoding"] = getCurrentEncoding();
+root["indent"]["length"] = getCurrentIndentLength();
+root["indent"]["use_space"] = getCurrentIndentUseSpace();
+
+Json::StyledWriter writer;
+// Make a new JSON document for the configuration. Preserve original comments.
+std::string outputConfig = writer.write( root );
+
+// You can also use streams. This will put the contents of any JSON
+// stream at a particular sub-value, if you'd like.
+std::cin >> root["subtree"];
+
+// And you can write to a stream, using the StyledWriter automatically.
+std::cout << root;
+\endcode
+
+\section _pbuild Build instructions
+The build instructions are located in the file
+README.txt in the top-directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest README.txt
+
+\section _pdownload Download
+The sources can be downloaded from
+the SourceForge download page.
+
+The latest version of the source is available in the project's subversion repository:
+
+http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/
+
+To check out the source, see the following
+instructions.
+
+\section _news What's New?
+The description of the latest changes can be found in
+NEWS.txt in the top-directory of the project.
+
+Permanent link to the latest revision of the file in subversion:
+latest NEWS.txt
+
+\section _plinks Project links
+- json-cpp home
+- json-cpp sourceforge project
+
+\section _rlinks Related links
+- JSON Specification and alternate language implementations.
+- YAML A data format designed for human readability.
+- UTF-8 and Unicode FAQ.
+
+\section _license License
+See file LICENSE in the top-directory of the project.
+
+Basically, JsonCpp is licensed under the MIT license, or public domain if desired
+and recognized in your jurisdiction.
+
+\author Baptiste Lepilleur
+*/
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/readme.txt b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt
new file mode 100644
index 0000000..0e42cdf
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doc/readme.txt
@@ -0,0 +1 @@
+The documentation is generated using doxygen (http://www.doxygen.org).
diff --git a/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox
new file mode 100644
index 0000000..e6fc17a
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doc/roadmap.dox
@@ -0,0 +1,37 @@
+/*! \page roadmap JsonCpp roadmap
+  \section ms_release Make JsonCpp ready for release
+  - Build system clean-up:
+    - Fix build on Windows (shared-library build is broken)
+    - Add enable/disable flag for static and shared library build
+    - Enhance help
+  - Platform portability check: (Note: was OK on last check)
+    - linux/gcc,
+    - solaris/cc,
+    - windows/msvc678,
+    - aix/vacpp
+  - Add JsonCpp version to header as numeric for use in preprocessor test
+  - Remove buggy experimental hash stuff
+  \section ms_strict Add a strict mode to the reader/parser
+  Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
+  - Enforce only object or array as root element
+  - Disable comment support
+  - Get jsonchecker failing tests to pass in strict mode
+  \section ms_writer Writer control
+  Provide more control over how specific items are serialized when JSON allows a choice:
+  - Optionally allow escaping of non-ASCII characters using the Unicode escape sequence "\\u".
+  - Optionally allow escaping of "/" using "\/".
+  \section ms_separation Expose a JSON reader/writer API that does not impose using Json::Value.
+  Some typical use-cases involve converting an application-specific structure to/from a JSON document.
+  - Event-based parser to allow deserializing a JSON document directly into an application data structure instead of
+    using the intermediate Json::Value.
+  - Stream-based writer to serialize a JSON document without using Json::Value as input.
+  - Performance-oriented parser/writer:
+    - Provide an event-based parser. Should allow pulling & skipping events for ease of use.
+    - Provide a JSON document builder: fast only.
+  \section ms_perfo Performance tuning
+  - Provide support for static property name definitions, avoiding allocation
+  - A static property dictionary can be provided to the JSON reader
+  - Performance scenarios & benchmarking
+  \section testing Testing
+  - Add more tests for Unicode parsing (e.g. surrogate pairs and error detection).
+*/
diff --git a/tags/jsoncpp/0.6.0-rc1/doxybuild.py b/tags/jsoncpp/0.6.0-rc1/doxybuild.py
new file mode 100644
index 0000000..03ad68d
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc1/doxybuild.py
@@ -0,0 +1,169 @@
+"""Script to generate doxygen documentation.
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/config.h b/tags/jsoncpp/0.6.0-rc1/include/json/config.h new file mode 100644 index 0000000..24991d5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGATED + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/features.h b/tags/jsoncpp/0.6.0-rc1/include/json/features.h new file mode 100644 index 0000000..0b53db1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h new file mode 100644 index 0000000..083d44f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/json.h b/tags/jsoncpp/0.6.0-rc1/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/reader.h b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h new file mode 100644 index 0000000..5e4c32a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/reader.h @@ -0,0 +1,214 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
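+ *
+ * A minimal usage sketch (illustrative only; \c document is assumed to be a
+ * std::string holding the JSON text, and strict mode is optional):
+ * \code
+ * Json::Reader reader( Json::Features::strictMode() );
+ * Json::Value root;
+ * if ( !reader.parse( document, root, false ) )
+ *    std::cerr << reader.getFormattedErrorMessages();
+ * \endcode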
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/value.h b/tags/jsoncpp/0.6.0-rc1/include/json/value.h new file mode 100644 index 0000000..66821ab --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
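+ *
+ * A short example of the accessors described above (an illustrative sketch,
+ * not an exhaustive reference):
+ * \code
+ * Json::Value root( Json::objectValue );
+ * root["name"] = "jsoncpp";                        // creates the member
+ * std::string name = root.get( "name", "?" ).asString();
+ *
+ * Json::Value list( Json::arrayValue );
+ * list.resize( 3 );                                // three nullValue elements
+ * list[0u] = 1;                                    // 0u selects the array overload
+ *
+ * Json::Value::Members members = root.getMemberNames();
+ * \endcode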
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ); + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
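+ /// An illustrative sketch (Path is documented above as experimental):
+ /// \code
+ /// Json::Value root;
+ /// Json::Path path( ".indent.length" );
+ /// path.make( root ) = 3;                         // creates root["indent"]["length"]
+ /// Json::Value length = path.resolve( root, 4 );  // 3 here; 4 if the node were missing
+ /// \endcode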
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
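+ *
+ * An illustrative reading of the above: a key is hashed to select a bucket,
+ * then that bucket's chain of ValueInternalLink pages is walked until the
+ * key is found or the last (partially filled) link is reached.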
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
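The look-up rule quoted in the ValueInternalArray comment above is plain integer arithmetic over a page table. The fragment below is a standalone toy model of that scheme, not the class itself (its pages_ member is private and grows on demand), reusing the same itemsPerPage constant of 8.

// Toy model of ValueInternalArray's paged look-up.
enum { itemsPerPage = 8 };   // mirrors ValueInternalArray::itemsPerPage

int lookupItem( int *const pages[], unsigned int itemIndex )
{
    unsigned int pageIndex = itemIndex / itemsPerPage;   // which page holds the item
    unsigned int offset    = itemIndex % itemsPerPage;   // slot inside that page
    return pages[pageIndex][offset];
}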
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
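ValueIteratorBase above adds key(), index() and memberName() to the usual iterator operations, and Value::begin()/end() declared earlier hand out such iterators. A short sketch of the resulting traversal idiom; the function below is illustrative and assumes the usual <json/value.h> include layout.

#include <json/value.h>
#include <iostream>

// Walk an objectValue; for an arrayValue, key()/index() yield the position instead.
void printMembers( const Json::Value &root )
{
    for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
    {
        std::cout << it.memberName() << " -> "   // "" when root is not an objectValue
                  << (*it).toStyledString();
    }
}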
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/include/json/writer.h b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h new file mode 100644 index 0000000..cb0bd9b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGATED) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGATED) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
+ * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
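FastWriter and StyledWriter above share the Writer::write() interface but make opposite trade-offs: one unformatted line for machine consumption versus indented, comment-preserving output. A brief usage sketch; the document content is made up.

#include <json/value.h>
#include <json/writer.h>
#include <iostream>

void writerExample()
{
    Json::Value root;
    root["encoding"] = "UTF-8";
    root["indent"]["length"] = 3;

    Json::FastWriter fast;                       // single line, e.g. for RPC payloads
    std::string compact = fast.write( root );

    Json::StyledWriter styled;                   // human friendly, keeps comments
    std::string pretty = styled.write( root );

    std::cout << compact << pretty;
}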
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc1/makerelease.py b/tags/jsoncpp/0.6.0-rc1/makerelease.py new file mode 100644 index 0000000..a6e330e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/makerelease.py @@ -0,0 +1,380 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
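The svn_* helpers above are thin wrappers over the svn command line; the tagging step that main() performs further down boils down to the sequence below. The version and destination are illustrative, and a writable svn sandbox is assumed.

# Sketch only: combines the helpers defined above.
tag_url = svn_join_url( SVN_TAG_ROOT, '0.6.0' )          # .../tags/jsoncpp/0.6.0
if svn_check_if_tag_exist( tag_url ):
    svn_remove_tag( tag_url, 'Overwriting previous tag' )
svn_tag_sandbox( tag_url, 'Release 0.6.0' )
svn_export( tag_url, 'dist/export' )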
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
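run_sftp_batch above drives a psftp/sftp client with a scripted batch; the release-tarball upload later in the script reduces to a call like the one below. The user name and tarball path are placeholders, and an sftp binary plus a SourceForge account are assumed.

# Illustrative only.
userhost = 'someuser,jsoncpp@frs.sourceforge.net'
run_sftp_batch( userhost, 'sftp', """
mput dist/jsoncpp-src-0.6.0.tar.gz
exit
""", retry=3 )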
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir + print 'Generating amalgated source tarball to', amalgated_tarball_path + amalgated_dir = 'dist/amalgated' + amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) + amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version + tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], + amalgated_dir, prefix_dir=amalgated_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + print 'Compiling decompressed tarball' + 
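The packaging steps above rely on the devtools.tarball helpers; isolated, the source-tarball round trip is just the two calls below, with the same call signatures as in main() and illustrative paths and version.

from devtools import tarball

export_dir = 'dist/export'                          # produced by svn_export() above
source_dir = 'jsoncpp-src-0.6.0'
source_tarball_path = 'dist/%s.tar.gz' % source_dir

tarball.make_tarball( source_tarball_path, [export_dir], export_dir,
                      prefix_dir=source_dir )                # pack export/ as jsoncpp-src-0.6.0/
tarball.decompress( source_tarball_path, 'dist/distcheck' )  # unpack it again for the build check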
all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
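The Glob helper registered on the SCons Environment above filters a directory listing by include/exclude filename patterns. A SConscript-style usage sketch; the tool path and source locations are illustrative.

# SConstruct fragment (illustrative).
env = Environment( tools = ['default', 'globtool'], toolpath = ['scons-tools'] )
sources = env.Glob( includes = ('*.cpp', '*.h'), excludes = '~*.cpp', dir = 'src/lib_json' )
lib = env.StaticLibrary( 'json_lib', sources )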
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
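The SubstInFile builder described above (its registration follows just below) copies each source to its target while replacing every key of SUBST_DICT with its construction-variable-expanded value. A SConstruct-style usage sketch; the file names and the substitution key are illustrative.

# SConstruct fragment (illustrative).
env = Environment( tools = ['default', 'substinfile'], toolpath = ['scons-tools'] )
env.SubstInFile( 'dist/doxyfile', 'doc/doxyfile.in',
                 SUBST_DICT = { '%JSONCPP_VERSION%': '0.6.0-rc1' } )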
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
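A minimal usage sketch for the BatchAllocator defined above. The angle-bracket template arguments were stripped in this export; upstream the class is declared as template<typename AllocatedType, const unsigned int objectPerAllocation>. As the header comment states, the allocator never runs constructors or destructors itself, so callers pair placement new with an explicit destructor call before release(). The Node type and function name below are illustrative only.

#include <new>                      // placement new
#include "json_batchallocator.h"

struct Node { double x, y; };       // at least sizeof(void*), as required by the constructor assert

static void batchAllocatorSketch()
{
   Json::BatchAllocator<Node, 1> allocator( 255 );  // 255 objects per page
   Node *n = new ( allocator.allocate() ) Node();   // construct in the returned slot
   n->x = 1.0;
   n->~Node();                                      // caller destroys the object...
   allocator.release( n );                          // ...then returns the slot to the free list
}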
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
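The page-index growth policy above, newIndexCount = (indexCount*3)/2 + 1 clamped up to minNewIndexCount, grows the realloc'd index by roughly one half each time. A standalone sketch of the sequence it produces when starting from an empty index (illustration only):

#include <cstdio>

static void indexGrowthSketch()
{
   unsigned int count = 0;
   for ( int step = 0; step < 7; ++step )
   {
      count = (count * 3) / 2 + 1;   // same formula as reallocateArrayPageIndex()
      std::printf( "%u ", count );   // prints: 1 2 4 7 11 17 26
   }
}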
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
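The design note above sketches a multiplicative hash, h(K) = value * K >> w, but the hash actually implemented later in this file (ValueInternalMap::hash) is a plain accumulator: each character of the key is multiplied by 37 and summed, and callers reduce the result modulo bucketsSize_. Note also that reserve(), as written below, only ever allocates a single bucket. A small sketch of the same computation, with illustrative names:

#include <cstdio>

// Same computation as ValueInternalMap::hash().
static unsigned int hashSketch( const char *key )
{
   unsigned int hash = 0;
   while ( *key )
      hash += *key++ * 37;
   return hash;
}

static void hashDemo()
{
   std::printf( "%u\n", hashSketch( "abc" ) );   // (97 + 98 + 99) * 37 = 10878
}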
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..7c594e2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_reader.cpp @@ -0,0 +1,880 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
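Before the implementation that follows, a minimal sketch of how this Reader is typically driven, mirroring what jsontestrunner's parseAndSaveValueTree() and rewriteValueTree() do earlier in this patch: parse a document into a Json::Value, report getFormattedErrorMessages() on failure, and round-trip the tree through Json::StyledWriter. The example document is illustrative only.

#include <cstdio>
#include <string>
#include <json/json.h>

static int readerSketch()
{
   const std::string document = "{ \"name\": \"jsoncpp\", \"tests\": [1, 2, 3] }";

   Json::Reader reader;                        // Features::all() by default
   Json::Value root;
   if ( !reader.parse( document, root, true /* collectComments */ ) )
   {
      std::printf( "%s\n", reader.getFormattedErrorMessages().c_str() );
      return 1;
   }

   Json::StyledWriter writer;                  // the writer used by rewriteValueTree()
   std::string rewritten = writer.write( root );
   std::printf( "%s", rewritten.c_str() );
   return 0;
}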
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &tokenStart ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd ); 
+ } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &tokenStart ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + while ( true ) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
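A worked instance of the overflow guard above, assuming 64-bit integer support so that Value::LargestUInt holds 2^64 - 1:

// maxLargestUInt     = 18446744073709551615
// threshold          = maxLargestUInt / 10 = 1844674407370955161
// lastDigitThreshold = maxLargestUInt % 10 = 5
//
// Parsing "18446744073709551615": after the first 19 digits, value equals
// threshold exactly; the final digit 5 is not greater than lastDigitThreshold
// and no characters remain, so the token stays on the integer path.
// Parsing "18446744073709551616": the same state is reached with digit 6 > 5,
// so the token falls through to decodeDouble().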
+ if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; + } + if ( isNegative ) + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize+1]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && 
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + while ( true ) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... 
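The stream extraction operator defined at the end of json_reader.cpp above throws std::runtime_error carrying getFormattedErrorMessages() when parsing fails, so stream-based callers normally wrap it in a try/catch. A brief sketch, with an illustrative file name and helper:

#include <cstdio>
#include <fstream>
#include <stdexcept>
#include <json/json.h>

static bool loadTree( const char *path, Json::Value &root )
{
   std::ifstream in( path );
   if ( !in )
      return false;
   try
   {
      in >> root;                   // Json::operator>> parses the whole stream
   }
   catch ( const std::runtime_error &e )
   {
      std::printf( "%s\n", e.what() );
      return false;
   }
   return true;
}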
+ * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specify the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( LargestUInt value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp new file mode 100644 index 0000000..c810417 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_value.cpp @@ -0,0 +1,1847 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
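A worked example of the surrogate-pair and UTF-8 handling above, using the escape sequence \uD83D\uDE00 purely as an illustration: decodeUnicodeCodePoint() combines the pair as 0x10000 + ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF) = 0x1F600, and codePointToUTF8(0x1F600) emits the four bytes 0xF0 0x9F 0x98 0x80. The sketch below only compiles from within src/lib_json, since json_tool.h is an internal header.

#include <cassert>
#include <json/json.h>
#include "json_tool.h"

static void utf8Sketch()
{
   const unsigned int cp = 0x10000 + ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF);
   assert( cp == 0x1F600 );
   const std::string utf8 = Json::codePointToUTF8( cp );
   assert( utf8.size() == 4 );
   assert( (unsigned char)utf8[0] == 0xF0 && (unsigned char)utf8[1] == 0x9F );
}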
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + + +/// Unknown size marker +enum { unknown = (unsigned)-1 }; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + +} // namespace Json + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
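The limit constants defined above are derived with two's-complement arithmetic rather than <limits>. Written out for the 32-bit case, assuming a 32-bit int: UInt(-1) is 0xFFFFFFFF (maxUInt), dividing by two gives 0x7FFFFFFF (maxInt), and complementing that gives 0x80000000, i.e. minInt = -maxInt - 1. The 64-bit and "largest" variants follow the same pattern. A small sketch under that assumption:

#include <cassert>
#include <json/json.h>

static void limitsSketch()
{
   assert( Json::Value::maxUInt == 0xFFFFFFFFu );               // UInt(-1)
   assert( Json::Value::maxInt  == 0x7FFFFFFF );                // UInt(-1) / 2
   assert( Json::Value::minInt  == -Json::Value::maxInt - 1 );  // ~(UInt(-1) / 2)
}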
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGATED) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGATED) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) +{ + /* + int typeDelta = other.type_ - type_; + switch ( type_ ) + { + case nullValue: + + return other.type_ == type_; + case intValue: + if ( other.type_.isNumeric() + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue, + break; + case arrayValue: + delete value_.array_; + break; + case objectValue: + delete value_.map_; + default: + JSON_ASSERT_UNREACHABLE; + } + */ + return 0; // unreachable +} + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other > *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex 
newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? *value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? 
*value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# 
endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void +Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( 
value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + + +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
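+            // (No error is actually reported here: node now points at the shared
+            //  null value, and resolution simply continues with any remaining
+            //  path arguments.)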
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( 
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..8c4c180 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGATED) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGATED) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
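+// (C4996 is MSVC's deprecation warning for the classic CRT functions such as
+//  sprintf and strdup, which it flags in favour of the _s replacements.)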
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asLargestInt() ); + break; + case uintValue: + document_ += valueToString( value.asLargestUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + 
lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
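+   // Illustrative usage (sketch), writing to any std::ostream with a two-space indent:
+   //   Json::StyledStreamWriter writer( "  " );
+   //   writer.write( std::cout, root );
+   // The operator<<( std::ostream &, const Value & ) at the end of this file wraps
+   // the same call with a default-constructed writer.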
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + while ( true ) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + while ( true ) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..0d07238 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/jsontest.h @@ -0,0 +1,259 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). 
+ PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. + TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque<Failure> Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname.
+ int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque<TestCaseFactory> Factories; + Factories tests_; + }; + + template <typename T> + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case.
+#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp new file mode 100644 index 0000000..de64200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/main.cpp @@ -0,0 +1,271 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , float_( 0.00390625f ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. 
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value 
&value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. 
+env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc1/test/cleantests.py b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 
+.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." +.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json new 
file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 
+.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 
+.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 
+.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 
+.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 
+.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 
+.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 
+.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 
+.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 
+.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 
+.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_large_01.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git 
a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected 
b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json new file mode 100644 
index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ 
+.="¢" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json 
b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff 
--git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": 
false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + 
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/test/rununittests.py b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' 
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc1/version b/tags/jsoncpp/0.6.0-rc1/version new file mode 100644 index 0000000..8d1042e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc1/version @@ -0,0 +1 @@ +0.6.0-rc1 \ No newline at end of file From 2c8d6e0e7b4a81892783d18777f6c67f32186a8f Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 07:30:45 +0000 Subject: [PATCH 181/268] Released 0.6.0-rc1 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@181 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version index 8d1042e..7defe1e 100644 --- a/trunk/jsoncpp/version +++ b/trunk/jsoncpp/version @@ -1 +1 @@ -0.6.0-rc1 \ No newline at end of file +0.6.0-dev \ No newline at end of file From f858e23b87cae2d2c6671b54eacaa32ca40eba10 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 09:54:49 +0000 Subject: [PATCH 182/268] Bug #3200841: removed "warning C4127: conditional expression is constant" concerning infinite loop by replacing while (true) with for (;;). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@182 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 4 ++-- trunk/jsoncpp/src/lib_json/json_writer.cpp | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 7c594e2..49d9d29 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -519,7 +519,7 @@ Reader::readArray( Token &tokenStart ) return true; } int index = 0; - while ( true ) + for (;;) { Value &value = currentValue()[ index++ ]; nodes_.push( &value ); @@ -762,7 +762,7 @@ Reader::recoverFromError( TokenType skipUntilToken ) { int errorCount = int(errors_.size()); Token skip; - while ( true ) + for (;;) { if ( !readToken(skip) ) errors_.resize( errorCount ); // discard errors caused by recovery diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 8c4c180..d777c15 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -332,7 +332,7 @@ StyledWriter::writeValue( const Value &value ) writeWithIndent( "{" ); indent(); Value::Members::iterator it = members.begin(); - while ( true ) + for (;;) { const std::string &name = *it; const Value &childValue = value[name]; @@ -372,7 +372,7 @@ StyledWriter::writeArrayValue( const Value &value ) indent(); bool hasChildValue = !childValues_.empty(); unsigned index =0; - while ( true ) + for (;;) { const Value &childValue = value[index]; writeCommentBeforeValue( childValue ); @@ -608,7 +608,7 @@ StyledStreamWriter::writeValue( const Value &value ) writeWithIndent( "{" ); indent(); Value::Members::iterator it = members.begin(); - while ( true ) + for (;;) { const std::string &name = *it; const Value &childValue = value[name]; @@ -648,7 +648,7 @@ StyledStreamWriter::writeArrayValue( const Value &value ) indent(); bool hasChildValue = !childValues_.empty(); unsigned index =0; - while ( true ) + for (;;) { const Value &childValue = value[index]; writeCommentBeforeValue( childValue ); From f10813508cca5e1cc91e1710f971baef61b066e7 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 16:51:48 +0000 Subject: [PATCH 183/268] - Bug #3200841: removed 
"warning C4127: conditional expression is constant" concerning infinite loop by replacing while (true) with for (;;). Added new JSON_FAIL macro. Commented unused parameters. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@183 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 4 +- trunk/jsoncpp/src/lib_json/json_value.cpp | 19 +-- trunk/jsoncpp/src/test_lib_json/main.cpp | 134 +++++++++++++++++++++ 3 files changed, 146 insertions(+), 11 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 49d9d29..15eda1d 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -449,7 +449,7 @@ Reader::readString() bool -Reader::readObject( Token &tokenStart ) +Reader::readObject( Token &/*tokenStart*/ ) { Token tokenName; std::string name; @@ -508,7 +508,7 @@ Reader::readObject( Token &tokenStart ) bool -Reader::readArray( Token &tokenStart ) +Reader::readArray( Token &/*tokenStart*/ ) { currentValue() = Value( arrayValue ); skipSpaces(); diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index c810417..c8e0478 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -22,7 +22,8 @@ #define JSON_ASSERT_UNREACHABLE assert( false ) #define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) throw std::runtime_error( message ); +#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) JSON_FAIL_MESSAGE( message ) namespace Json { @@ -39,7 +40,7 @@ const LargestUInt Value::maxLargestUInt = LargestUInt(-1); /// Unknown size marker -enum { unknown = (unsigned)-1 }; +static const unsigned int unknown = (unsigned)-1; /** Duplicates the specified string value. 
@@ -688,7 +689,7 @@ Value::asString() const case realValue: case arrayValue: case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to string" ); + JSON_FAIL_MESSAGE( "Type is not convertible to string" ); default: JSON_ASSERT_UNREACHABLE; } @@ -725,7 +726,7 @@ Value::asInt() const case stringValue: case arrayValue: case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to int" ); + JSON_FAIL_MESSAGE( "Type is not convertible to int" ); default: JSON_ASSERT_UNREACHABLE; } @@ -755,7 +756,7 @@ Value::asUInt() const case stringValue: case arrayValue: case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to uint" ); + JSON_FAIL_MESSAGE( "Type is not convertible to uint" ); default: JSON_ASSERT_UNREACHABLE; } @@ -785,7 +786,7 @@ Value::asInt64() const case stringValue: case arrayValue: case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to Int64" ); + JSON_FAIL_MESSAGE( "Type is not convertible to Int64" ); default: JSON_ASSERT_UNREACHABLE; } @@ -813,7 +814,7 @@ Value::asUInt64() const case stringValue: case arrayValue: case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to UInt64" ); + JSON_FAIL_MESSAGE( "Type is not convertible to UInt64" ); default: JSON_ASSERT_UNREACHABLE; } @@ -866,7 +867,7 @@ Value::asDouble() const case stringValue: case arrayValue: case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to double" ); + JSON_FAIL_MESSAGE( "Type is not convertible to double" ); default: JSON_ASSERT_UNREACHABLE; } @@ -895,7 +896,7 @@ Value::asFloat() const case stringValue: case arrayValue: case objectValue: - JSON_ASSERT_MESSAGE( false, "Type is not convertible to float" ); + JSON_FAIL_MESSAGE( "Type is not convertible to float" ); default: JSON_ASSERT_UNREACHABLE; } diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index de64200..004702b 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -37,6 +37,7 @@ struct ValueTest : JsonTest::TestCase Json::Value true_; Json::Value false_; + ValueTest() : emptyArray_( Json::arrayValue ) , emptyObject_( Json::objectValue ) @@ -77,6 +78,10 @@ struct ValueTest : JsonTest::TestCase void checkMemberCount( Json::Value &value, unsigned int expectedCount ); void checkIs( const Json::Value &value, const IsCheck &check ); + + void checkIsLess( const Json::Value &x, const Json::Value &y ); + + void checkIsEqual( const Json::Value &x, const Json::Value &y ); }; @@ -251,6 +256,128 @@ ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) } +JSONTEST_FIXTURE( ValueTest, compareInt ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0, 10 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10, 10 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( -10, -10 ) ); + JSONTEST_ASSERT_PRED( checkIsLess( -10, 0 ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareUInt ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0u, 10u ) ); + JSONTEST_ASSERT_PRED( checkIsLess( 0u, Json::Value::maxUInt ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10u, 10u ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareDouble ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0.0, 10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10.0, 10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( -10.0, -10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsLess( -10.0, 0.0 ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareString ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( "", " " ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "", "a" ) ); + 
JSONTEST_ASSERT_PRED( checkIsLess( "abcd", "zyui" ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "abc", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( "abcd", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( " ", " " ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "ABCD", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( "ABCD", "ABCD" ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareBoolean ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( false, true ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( false, false ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( true, true ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareArray ) +{ + // array compare size then content + Json::Value emptyArray(Json::arrayValue); + Json::Value l1aArray; + l1aArray.append( 0 ); + Json::Value l1bArray; + l1bArray.append( 10 ); + Json::Value l2aArray; + l2aArray.append( 0 ); + l2aArray.append( 0 ); + Json::Value l2bArray; + l2bArray.append( 0 ); + l2bArray.append( 10 ); + JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l1aArray ) ); + JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l2aArray ) ); + JSONTEST_ASSERT_PRED( checkIsLess( l1aArray, l2aArray ) ); + JSONTEST_ASSERT_PRED( checkIsLess( l2aArray, l2bArray ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( emptyArray, Json::Value( emptyArray ) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l1aArray, Json::Value( l1aArray) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l2bArray, Json::Value( l2bArray) ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareObject ) +{ + // object compare size then content + Json::Value emptyObject(Json::objectValue); + Json::Value l1aObject; + l1aObject["key1"] = 0; + Json::Value l1bObject; + l1aObject["key1"] = 10; + Json::Value l2aObject; + l2aObject["key1"] = 0; + l2aObject["key2"] = 0; + JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l1aObject ) ); + JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l2aObject ) ); + JSONTEST_ASSERT_PRED( checkIsLess( l1aObject, l2aObject ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( emptyObject, Json::Value( emptyObject ) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l1aObject, Json::Value( l1aObject ) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l2aObject, Json::Value( l2aObject ) ) ); +} + + +void +ValueTest::checkIsLess( const Json::Value &x, const Json::Value &y ) +{ + JSONTEST_ASSERT( x < y ); + JSONTEST_ASSERT( y > x ); + JSONTEST_ASSERT( x <= y ); + JSONTEST_ASSERT( y >= x ); + JSONTEST_ASSERT( !(x == y) ); + JSONTEST_ASSERT( !(y == x) ); + JSONTEST_ASSERT( !(x >= y) ); + JSONTEST_ASSERT( !(y <= x) ); + JSONTEST_ASSERT( !(x > y) ); + JSONTEST_ASSERT( !(y < x) ); +} + + +void +ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) +{ + JSONTEST_ASSERT( x == y ); + JSONTEST_ASSERT( y == x ); + JSONTEST_ASSERT( x <= y ); + JSONTEST_ASSERT( y <= x ); + JSONTEST_ASSERT( x >= y ); + JSONTEST_ASSERT( y >= x ); + JSONTEST_ASSERT( !(x < y) ); + JSONTEST_ASSERT( !(y < x) ); + JSONTEST_ASSERT( !(x > y) ); + JSONTEST_ASSERT( !(y > x) ); +} + int main( int argc, const char *argv[] ) { @@ -267,5 +394,12 @@ int main( int argc, const char *argv[] ) JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareBoolean ); + 
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareObject ); return runner.runCommandLine( argc, argv ); } From 6433ba887c2e93f4e4061e7b096ea9d4317ab3cf Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 16:53:10 +0000 Subject: [PATCH 184/268] - Added unit tests for comparison operators (except compare()) - Fixed Value::operator <= implementation (had the semantic of operator >=). Found when addigin unit tests for comparison operators. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@184 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 3 +++ trunk/jsoncpp/src/lib_json/json_value.cpp | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 7978c0a..9b5d69c 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -86,6 +86,9 @@ - Bug #3139678: stack buffer overflow when parsing a double with a length of 32 characters. + + - Fixed Value::operator <= implementation (had the semantic of operator >=). + Found when addigin unit tests for comparison operators. * License diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index c8e0478..fd1266b 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -600,7 +600,7 @@ Value::operator <( const Value &other ) const bool Value::operator <=( const Value &other ) const { - return !(other > *this); + return !(other < *this); } bool From 7ec1d25985f6b15fd7b283f9cd600187aa3cd0f0 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 18:41:01 +0000 Subject: [PATCH 185/268] Untabified some sources git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@185 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/reader.h | 10 ++--- trunk/jsoncpp/include/json/value.h | 48 +++++++++++----------- trunk/jsoncpp/src/lib_json/json_reader.cpp | 2 +- trunk/jsoncpp/src/lib_json/json_value.cpp | 8 ++-- trunk/jsoncpp/src/lib_json/json_writer.cpp | 2 +- 5 files changed, 35 insertions(+), 35 deletions(-) diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 5e4c32a..34dc785 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -197,11 +197,11 @@ namespace Json { Result: \verbatim { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } } \endverbatim \throw std::exception on parse error. diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 66821ab..b629d8c 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -132,30 +132,30 @@ namespace Json { typedef Json::UInt64 UInt64; typedef Json::Int64 Int64; #endif // defined(JSON_HAS_INT64) - typedef Json::LargestInt LargestInt; - typedef Json::LargestUInt LargestUInt; + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; typedef Json::ArrayIndex ArrayIndex; static const Value null; - /// Minimum signed integer value that can be stored in a Json::Value. - static const LargestInt minLargestInt; - /// Maximum signed integer value that can be stored in a Json::Value. + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. 
static const LargestInt maxLargestInt; - /// Maximum unsigned integer value that can be stored in a Json::Value. + /// Maximum unsigned integer value that can be stored in a Json::Value. static const LargestUInt maxLargestUInt; - /// Minimum signed int value that can be stored in a Json::Value. - static const Int minInt; - /// Maximum signed int value that can be stored in a Json::Value. + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. static const Int maxInt; - /// Maximum unsigned int value that can be stored in a Json::Value. + /// Maximum unsigned int value that can be stored in a Json::Value. static const UInt maxUInt; - /// Minimum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 minInt64; - /// Maximum signed 64 bits int value that can be stored in a Json::Value. + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. static const Int64 maxInt64; - /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. static const UInt64 maxUInt64; private: @@ -202,14 +202,14 @@ namespace Json { To create an empty array, pass arrayValue. To create an empty object, pass objectValue. Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. + This is useful since clear() and resize() will not alter types. Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode */ Value( ValueType type = nullValue ); Value( Int value ); @@ -315,24 +315,24 @@ namespace Json { /// this from the operator[] which takes a string.) Value &operator[]( ArrayIndex index ); - /// Access an array element (zero based index ). + /// Access an array element (zero based index ). /// If the array contains less than index element, then null value are inserted /// in the array so that its size is index+1. /// (You may need to say 'value[0u]' to get your compiler to distinguish /// this from the operator[] which takes a string.) Value &operator[]( int index ); - /// Access an array element (zero based index ) + /// Access an array element (zero based index ) /// (You may need to say 'value[0u]' to get your compiler to distinguish /// this from the operator[] which takes a string.) const Value &operator[]( ArrayIndex index ) const; - /// Access an array element (zero based index ) + /// Access an array element (zero based index ) /// (You may need to say 'value[0u]' to get your compiler to distinguish /// this from the operator[] which takes a string.) const Value &operator[]( int index ) const; - /// If the array contains at least index+1 elements, returns the element value, + /// If the array contains at least index+1 elements, returns the element value, /// otherwise returns defaultValue. 
Value get( ArrayIndex index, const Value &defaultValue ) const; diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 15eda1d..fa46dca 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -488,7 +488,7 @@ Reader::readObject( Token &/*tokenStart*/ ) if ( !readToken( comma ) || ( comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) + comma.type_ != tokenComment ) ) { return addErrorAndRecover( "Missing ',' or '}' in object declaration", comma, diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index fd1266b..b8de89c 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -827,9 +827,9 @@ LargestInt Value::asLargestInt() const { #if defined(JSON_NO_INT64) - return asInt(); + return asInt(); #else - return asInt64(); + return asInt64(); #endif } @@ -838,9 +838,9 @@ LargestUInt Value::asLargestUInt() const { #if defined(JSON_NO_INT64) - return asUInt(); + return asUInt(); #else - return asUInt64(); + return asUInt64(); #endif } diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index d777c15..90e5c1b 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -656,7 +656,7 @@ StyledStreamWriter::writeArrayValue( const Value &value ) writeWithIndent( childValues_[index] ); else { - writeIndent(); + writeIndent(); writeValue( childValue ); } if ( ++index == size ) From d9383b8e3c5ce7d743d55a72ba77d8aa5277db1b Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 20:11:48 +0000 Subject: [PATCH 186/268] Value::compare() is now const and has an actual implementation with unit tests. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@186 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 3 ++ trunk/jsoncpp/include/json/value.h | 2 +- trunk/jsoncpp/src/lib_json/json_value.cpp | 37 ++++++----------------- trunk/jsoncpp/src/test_lib_json/main.cpp | 25 +++++++++++++++ 4 files changed, 38 insertions(+), 29 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 9b5d69c..71edd19 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -90,6 +90,9 @@ - Fixed Value::operator <= implementation (had the semantic of operator >=). Found when addigin unit tests for comparison operators. + - Value::compare() is now const and has an actual implementation with + unit tests. + * License - See file LICENSE for details. 
Basically JsonCpp is now licensed under diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index b629d8c..c9c7e1d 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -256,7 +256,7 @@ namespace Json { bool operator ==( const Value &other ) const; bool operator !=( const Value &other ) const; - int compare( const Value &other ); + int compare( const Value &other ) const; const char *asCString() const; std::string asString() const; diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index b8de89c..ec3fb2e 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -524,35 +524,16 @@ Value::type() const int -Value::compare( const Value &other ) +Value::compare( const Value &other ) const { - /* - int typeDelta = other.type_ - type_; - switch ( type_ ) - { - case nullValue: - - return other.type_ == type_; - case intValue: - if ( other.type_.isNumeric() - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue, - break; - case arrayValue: - delete value_.array_; - break; - case objectValue: - delete value_.map_; - default: - JSON_ASSERT_UNREACHABLE; - } - */ - return 0; // unreachable + if ( *this < other ) + return -1; + if ( *this > other ) + return 1; + return 0; } + bool Value::operator <( const Value &other ) const { @@ -594,7 +575,7 @@ Value::operator <( const Value &other ) const default: JSON_ASSERT_UNREACHABLE; } - return 0; // unreachable + return false; // unreachable } bool @@ -656,7 +637,7 @@ Value::operator ==( const Value &other ) const default: JSON_ASSERT_UNREACHABLE; } - return 0; // unreachable + return false; // unreachable } bool diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 004702b..3275219 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -256,6 +256,12 @@ ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) } +JSONTEST_FIXTURE( ValueTest, compareNull ) +{ + JSONTEST_ASSERT_PRED( checkIsEqual( Json::Value(), Json::Value() ) ); +} + + JSONTEST_FIXTURE( ValueTest, compareInt ) { JSONTEST_ASSERT_PRED( checkIsLess( 0, 10 ) ); @@ -347,6 +353,19 @@ JSONTEST_FIXTURE( ValueTest, compareObject ) } +JSONTEST_FIXTURE( ValueTest, compareType ) +{ + // object of different type are ordered according to their type + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(), Json::Value(1) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1), Json::Value(1u) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1u), Json::Value(1.0) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1.0), Json::Value("a") ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value("a"), Json::Value(true) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(true), Json::Value(Json::arrayValue) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(Json::arrayValue), Json::Value(Json::objectValue) ) ); +} + + void ValueTest::checkIsLess( const Json::Value &x, const Json::Value &y ) { @@ -360,6 +379,8 @@ ValueTest::checkIsLess( const Json::Value &x, const Json::Value &y ) JSONTEST_ASSERT( !(y <= x) ); JSONTEST_ASSERT( !(x > y) ); JSONTEST_ASSERT( !(y < x) ); + JSONTEST_ASSERT( x.compare( y ) < 0 ); + JSONTEST_ASSERT( y.compare( x ) >= 0 ); } @@ -376,6 +397,8 @@ ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) JSONTEST_ASSERT( !(y < x) ); JSONTEST_ASSERT( !(x > y) ); 
JSONTEST_ASSERT( !(y > x) ); + JSONTEST_ASSERT( x.compare( y ) == 0 ); + JSONTEST_ASSERT( y.compare( x ) == 0 ); } @@ -394,6 +417,7 @@ int main( int argc, const char *argv[] ) JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareNull ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareInt ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareUInt ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareDouble ); @@ -401,5 +425,6 @@ int main( int argc, const char *argv[] ) JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareBoolean ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareArray ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareType ); return runner.runCommandLine( argc, argv ); } From 752f05d19eb199ee9d6c719512554c14ea8759e0 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 21:09:30 +0000 Subject: [PATCH 187/268] Fixed typo: amalga*ma*te. Replaced macro JSON_IS_AMALGATED with JSON_IS_AMALGAMATION git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@187 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 4 ++-- trunk/jsoncpp/README.txt | 16 ++++++++-------- trunk/jsoncpp/{amalgate.py => amalgamate.py} | 20 ++++++++++---------- trunk/jsoncpp/include/json/config.h | 2 +- trunk/jsoncpp/include/json/features.h | 4 ++-- trunk/jsoncpp/include/json/forwards.h | 4 ++-- trunk/jsoncpp/include/json/reader.h | 4 ++-- trunk/jsoncpp/include/json/value.h | 4 ++-- trunk/jsoncpp/include/json/writer.h | 4 ++-- trunk/jsoncpp/makerelease.py | 16 ++++++++-------- trunk/jsoncpp/src/lib_json/json_reader.cpp | 4 ++-- trunk/jsoncpp/src/lib_json/json_value.cpp | 8 ++++---- trunk/jsoncpp/src/lib_json/json_writer.cpp | 4 ++-- 13 files changed, 47 insertions(+), 47 deletions(-) rename trunk/jsoncpp/{amalgate.py => amalgamate.py} (90%) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 71edd19..e53b880 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -13,8 +13,8 @@ Notes: you need to setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt in start menu) before running scons. - - Added support for amalgated source and header generation (a la sqlite). - Refer to README.txt section "Generating amalgated source and header" + - Added support for amalgamated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgamated source and header" for detail. * Value diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index ba70329..51a098a 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -90,30 +90,30 @@ Notes that the documentation is also available for download as a tarball. The documentation of the latest release is available online at: http://jsoncpp.sourceforge.net/ -* Generating amalgated source and header - ====================================== +* Generating amalgamated source and header + ======================================== JsonCpp is provided with a script to generate a single header and a single source file to ease inclusion in an existing project. 
-The amalgated source can be generated at any time by running the following +The amalgamated source can be generated at any time by running the following command from the top-directory (requires python 2.6): -python amalgate.py +python amalgamate.py It is possible to specify header name. See -h options for detail. By default, the following files are generated: - dist/jsoncpp.cpp: source file that need to be added to your project - dist/json/json.h: header file corresponding to use in your project. It is -equivalent to including json/json.h in non-amalgated source. This header +equivalent to including json/json.h in non-amalgamated source. This header only depends on standard headers. - dist/json/json-forwards.h: header the provides forward declaration of all JsonCpp types. This typically what should be included in headers to speed-up compilation. -The amalgated sources are generated by concatenating JsonCpp source in the -correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of -other headers. +The amalgamated sources are generated by concatenating JsonCpp source in the +correct order and defining macro JSON_IS_AMALGAMATION to prevent inclusion +of other headers. * Using json-cpp in your project: =============================== diff --git a/trunk/jsoncpp/amalgate.py b/trunk/jsoncpp/amalgamate.py similarity index 90% rename from trunk/jsoncpp/amalgate.py rename to trunk/jsoncpp/amalgamate.py index 502971c..1476a5f 100644 --- a/trunk/jsoncpp/amalgate.py +++ b/trunk/jsoncpp/amalgamate.py @@ -9,7 +9,7 @@ import os.path import sys -class AmalagatedFile: +class AmalgamationFile: def __init__( self, top_dir ): self.top_dir = top_dir self.blocks = [] @@ -47,9 +47,9 @@ def write_to( self, output_path ): f.write( self.get_value() ) f.close() -def amalgate_source( source_top_dir=None, - target_source_path=None, - header_include_path=None ): +def amalgamate_source( source_top_dir=None, + target_source_path=None, + header_include_path=None ): """Produces amalgated source. Parameters: source_top_dir: top-directory @@ -57,7 +57,7 @@ def amalgate_source( source_top_dir=None, header_include_path: generated header path relative to target_source_path. """ print 'Amalgating header...' - header = AmalagatedFile( source_top_dir ) + header = AmalgamationFile( source_top_dir ) header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' ) header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) header.add_file( 'LICENSE', wrap_in_comment=True ) @@ -81,7 +81,7 @@ def amalgate_source( source_top_dir=None, base, ext = os.path.splitext( header_include_path ) forward_header_include_path = base + '-forwards' + ext print 'Amalgating forward header...' - header = AmalagatedFile( source_top_dir ) + header = AmalgamationFile( source_top_dir ) header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) @@ -101,7 +101,7 @@ def amalgate_source( source_top_dir=None, header.write_to( target_forward_header_path ) print 'Amalgating source...' - source = AmalagatedFile( source_top_dir ) + source = AmalgamationFile( source_top_dir ) source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' 
) source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) source.add_file( 'LICENSE', wrap_in_comment=True ) @@ -134,9 +134,9 @@ def main(): parser.enable_interspersed_args() options, args = parser.parse_args() - msg = amalgate_source( source_top_dir=options.top_dir, - target_source_path=options.target_source_path, - header_include_path=options.header_include_path ) + msg = amalgamate_source( source_top_dir=options.top_dir, + target_source_path=options.target_source_path, + header_include_path=options.header_include_path ) if msg: sys.stderr.write( msg + '\n' ) sys.exit( 1 ) diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 24991d5..7609d45 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -31,7 +31,7 @@ /// If defined, indicates that the source file is amalgated /// to prevent private header inclusion. /// Remarks: it is automatically defined in the generated amalgated header. -// #define JSON_IS_AMALGATED +// #define JSON_IS_AMALGAMATION # ifdef JSON_IN_CPPTL diff --git a/trunk/jsoncpp/include/json/features.h b/trunk/jsoncpp/include/json/features.h index 0b53db1..4353278 100644 --- a/trunk/jsoncpp/include/json/features.h +++ b/trunk/jsoncpp/include/json/features.h @@ -6,9 +6,9 @@ #ifndef CPPTL_JSON_FEATURES_H_INCLUDED # define CPPTL_JSON_FEATURES_H_INCLUDED -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) namespace Json { diff --git a/trunk/jsoncpp/include/json/forwards.h b/trunk/jsoncpp/include/json/forwards.h index 083d44f..ab863da 100644 --- a/trunk/jsoncpp/include/json/forwards.h +++ b/trunk/jsoncpp/include/json/forwards.h @@ -6,9 +6,9 @@ #ifndef JSON_FORWARDS_H_INCLUDED # define JSON_FORWARDS_H_INCLUDED -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include "config.h" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) namespace Json { diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 34dc785..0a324df 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -6,10 +6,10 @@ #ifndef CPPTL_JSON_READER_H_INCLUDED # define CPPTL_JSON_READER_H_INCLUDED -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include "features.h" # include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) # include # include # include diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index c9c7e1d..32e3455 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -6,9 +6,9 @@ #ifndef CPPTL_JSON_H_INCLUDED # define CPPTL_JSON_H_INCLUDED -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include "forwards.h" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) # include # include diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index cb0bd9b..4789363 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -6,9 +6,9 @@ #ifndef JSON_WRITER_H_INCLUDED # define JSON_WRITER_H_INCLUDED -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include "value.h" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) # include # 
include # include diff --git a/trunk/jsoncpp/makerelease.py b/trunk/jsoncpp/makerelease.py index a6e330e..6b8eec3 100644 --- a/trunk/jsoncpp/makerelease.py +++ b/trunk/jsoncpp/makerelease.py @@ -23,7 +23,7 @@ import os import time from devtools import antglob, fixeol, tarball -import amalgate +import amalgamate SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' @@ -323,13 +323,13 @@ def main(): print 'Generating source tarball to', source_tarball_path tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir - print 'Generating amalgated source tarball to', amalgated_tarball_path - amalgated_dir = 'dist/amalgated' - amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' ) - amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version - tarball.make_tarball( amalgated_tarball_path, [amalgated_dir], - amalgated_dir, prefix_dir=amalgated_source_dir ) + amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir + print 'Generating amalgamation source tarball to', amalgamation_tarball_path + amalgamation_dir = 'dist/amalgamation' + amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) + amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version + tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], + amalgamation_dir, prefix_dir=amalgamation_source_dir ) # Decompress source tarball, download and install scons-local distcheck_dir = 'dist/distcheck' diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index fa46dca..8bb0304 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -3,11 +3,11 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include # include # include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) #include #include #include diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index ec3fb2e..ff98f63 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -3,13 +3,13 @@ // recognized in your jurisdiction. 
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include # include # ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR # include "json_batchallocator.h" # endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) #include #include #include @@ -83,14 +83,14 @@ releaseStringValue( char *value ) // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # ifdef JSON_VALUE_USE_INTERNAL_MAP # include "json_internalarray.inl" # include "json_internalmap.inl" # endif // JSON_VALUE_USE_INTERNAL_MAP # include "json_valueiterator.inl" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) namespace Json { diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 90e5c1b..1bda183 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -3,10 +3,10 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE -#if !defined(JSON_IS_AMALGATED) +#if !defined(JSON_IS_AMALGAMATION) # include # include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGATED) +#endif // if !defined(JSON_IS_AMALGAMATION) #include #include #include From 9897804a8eaf57ce41d131f06a07d7b485e04bab Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 21:30:42 +0000 Subject: [PATCH 188/268] Release 0.6.0-rc2 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@188 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version index 7defe1e..673adfb 100644 --- a/trunk/jsoncpp/version +++ b/trunk/jsoncpp/version @@ -1 +1 @@ -0.6.0-dev \ No newline at end of file +0.6.0-rc2 \ No newline at end of file From 6efb8b231ed1e8e87978e081aba3270ad184783c Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 21:31:33 +0000 Subject: [PATCH 189/268] Release 0.6.0-rc2 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@189 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc2/AUTHORS | 1 + tags/jsoncpp/0.6.0-rc2/LICENSE | 55 + tags/jsoncpp/0.6.0-rc2/NEWS.txt | 101 + tags/jsoncpp/0.6.0-rc2/README.txt | 172 ++ tags/jsoncpp/0.6.0-rc2/SConstruct | 248 ++ tags/jsoncpp/0.6.0-rc2/amalgamate.py | 147 ++ tags/jsoncpp/0.6.0-rc2/devtools/__init__.py | 1 + tags/jsoncpp/0.6.0-rc2/devtools/antglob.py | 201 ++ tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py | 63 + .../0.6.0-rc2/devtools/licenseupdater.py | 93 + tags/jsoncpp/0.6.0-rc2/devtools/tarball.py | 53 + tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.6.0-rc2/doc/footer.html | 23 + tags/jsoncpp/0.6.0-rc2/doc/header.html | 24 + tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox | 126 + tags/jsoncpp/0.6.0-rc2/doc/readme.txt | 1 + tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox | 37 + tags/jsoncpp/0.6.0-rc2/doxybuild.py | 169 ++ .../jsoncpp/0.6.0-rc2/include/json/autolink.h | 24 + tags/jsoncpp/0.6.0-rc2/include/json/config.h | 96 + .../jsoncpp/0.6.0-rc2/include/json/features.h | 49 + .../jsoncpp/0.6.0-rc2/include/json/forwards.h | 44 + tags/jsoncpp/0.6.0-rc2/include/json/json.h | 15 + 
tags/jsoncpp/0.6.0-rc2/include/json/reader.h | 214 ++ tags/jsoncpp/0.6.0-rc2/include/json/value.h | 1103 +++++++++ tags/jsoncpp/0.6.0-rc2/include/json/writer.h | 185 ++ .../0.6.0-rc2/makefiles/vs71/jsoncpp.sln | 46 + .../0.6.0-rc2/makefiles/vs71/jsontest.vcproj | 119 + .../0.6.0-rc2/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.6.0-rc2/makerelease.py | 380 +++ .../jsoncpp/0.6.0-rc2/scons-tools/globtool.py | 53 + tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py | 179 ++ .../0.6.0-rc2/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py | 82 + .../0.6.0-rc2/src/jsontestrunner/main.cpp | 269 +++ .../0.6.0-rc2/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../0.6.0-rc2/src/lib_json/json_reader.cpp | 880 +++++++ .../0.6.0-rc2/src/lib_json/json_tool.h | 93 + .../0.6.0-rc2/src/lib_json/json_value.cpp | 1829 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../0.6.0-rc2/src/lib_json/json_writer.cpp | 838 +++++++ .../jsoncpp/0.6.0-rc2/src/lib_json/sconscript | 8 + .../0.6.0-rc2/src/test_lib_json/jsontest.cpp | 608 +++++ .../0.6.0-rc2/src/test_lib_json/jsontest.h | 259 ++ .../0.6.0-rc2/src/test_lib_json/main.cpp | 430 ++++ .../0.6.0-rc2/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.6.0-rc2/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../0.6.0-rc2/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../0.6.0-rc2/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../0.6.0-rc2/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + .../0.6.0-rc2/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../0.6.0-rc2/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../0.6.0-rc2/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../0.6.0-rc2/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../0.6.0-rc2/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../0.6.0-rc2/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../0.6.0-rc2/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../0.6.0-rc2/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../0.6.0-rc2/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../0.6.0-rc2/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../0.6.0-rc2/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../0.6.0-rc2/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../0.6.0-rc2/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../0.6.0-rc2/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../0.6.0-rc2/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../0.6.0-rc2/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../0.6.0-rc2/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../0.6.0-rc2/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + 
.../0.6.0-rc2/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.6.0-rc2/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../0.6.0-rc2/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../0.6.0-rc2/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../0.6.0-rc2/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../0.6.0-rc2/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.6.0-rc2/test/data/test_real_01.expected | 2 + .../0.6.0-rc2/test/data/test_real_01.json | 3 + .../0.6.0-rc2/test/data/test_real_02.expected | 2 + .../0.6.0-rc2/test/data/test_real_02.json | 3 + .../0.6.0-rc2/test/data/test_real_03.expected | 2 + .../0.6.0-rc2/test/data/test_real_03.json | 3 + .../0.6.0-rc2/test/data/test_real_04.expected | 2 + .../0.6.0-rc2/test/data/test_real_04.json | 3 + .../0.6.0-rc2/test/data/test_real_05.expected | 3 + .../0.6.0-rc2/test/data/test_real_05.json | 3 + .../0.6.0-rc2/test/data/test_real_06.expected | 3 + .../0.6.0-rc2/test/data/test_real_06.json | 3 + .../0.6.0-rc2/test/data/test_real_07.expected | 3 + .../0.6.0-rc2/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../0.6.0-rc2/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../0.6.0-rc2/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 + .../0.6.0-rc2/test/data/test_string_03.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../0.6.0-rc2/test/generate_expected.py | 11 + .../0.6.0-rc2/test/jsonchecker/fail1.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail10.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail11.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail12.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail13.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail14.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail15.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail16.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail17.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail18.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail19.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail2.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail20.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail21.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail22.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail23.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail24.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail25.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail26.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail27.json | 2 + .../0.6.0-rc2/test/jsonchecker/fail28.json | 2 + .../0.6.0-rc2/test/jsonchecker/fail29.json | 1 + 
.../0.6.0-rc2/test/jsonchecker/fail3.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail30.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail31.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail32.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail33.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail4.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail5.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail6.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail7.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail8.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail9.json | 1 + .../0.6.0-rc2/test/jsonchecker/pass1.json | 58 + .../0.6.0-rc2/test/jsonchecker/pass2.json | 1 + .../0.6.0-rc2/test/jsonchecker/pass3.json | 6 + .../0.6.0-rc2/test/jsonchecker/readme.txt | 3 + .../0.6.0-rc2/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.6.0-rc2/test/runjsontests.py | 134 ++ tags/jsoncpp/0.6.0-rc2/test/rununittests.py | 73 + tags/jsoncpp/0.6.0-rc2/version | 1 + 186 files changed, 15651 insertions(+) create mode 100644 tags/jsoncpp/0.6.0-rc2/AUTHORS create mode 100644 tags/jsoncpp/0.6.0-rc2/LICENSE create mode 100644 tags/jsoncpp/0.6.0-rc2/NEWS.txt create mode 100644 tags/jsoncpp/0.6.0-rc2/README.txt create mode 100644 tags/jsoncpp/0.6.0-rc2/SConstruct create mode 100644 tags/jsoncpp/0.6.0-rc2/amalgamate.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/__init__.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/antglob.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/tarball.py create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/footer.html create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/header.html create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.6.0-rc2/doxybuild.py create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/autolink.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/config.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/features.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/forwards.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/json.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/reader.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/value.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/writer.h create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc2/makerelease.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp 
create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc2/test/cleantests.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected create mode 100644 
tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json create mode 100644 
tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/generate_expected.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/runjsontests.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/rununittests.py create mode 100644 
tags/jsoncpp/0.6.0-rc2/version diff --git a/tags/jsoncpp/0.6.0-rc2/AUTHORS b/tags/jsoncpp/0.6.0-rc2/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc2/LICENSE b/tags/jsoncpp/0.6.0-rc2/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. + +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/0.6.0-rc2/NEWS.txt b/tags/jsoncpp/0.6.0-rc2/NEWS.txt new file mode 100644 index 0000000..e53b880 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/NEWS.txt @@ -0,0 +1,101 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installation. 
+
+ - Added support for Microsoft Visual Studio 2008 (bug #2930462):
+   The platform "msvc90" has been added.
+
+   Note: you need to set up the environment by running vcvars32.bat
+   (e.g. the MSVC 2008 command prompt in the start menu) before running scons.
+
+ - Added support for amalgamated source and header generation (a la sqlite).
+   Refer to the README.txt section "Generating amalgamated source and header"
+   for details.
+
+* Value
+
+ - Removed the experimental ValueAllocator; it caused static
+   initialization/destruction order issues (bug #2934500).
+   The DefaultValueAllocator has been inlined in the code.
+
+ - Added support for 64-bit integers:
+
+   Types Json::Int64 and Json::UInt64 have been added. They are aliased
+   to 64-bit integers on systems that support them (based on __int64 on
+   the Microsoft Visual Studio platform, and long long on other platforms).
+
+   Types Json::LargestInt and Json::LargestUInt have been added. They are
+   aliased to the largest integer type supported:
+   either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively.
+
+   Json::Value::asInt() and Json::Value::asUInt() still return plain
+   "int" based types, but assert if an attempt is made to retrieve
+   a 64-bit value that cannot be represented as the return type.
+
+   Json::Value::asInt64() and Json::Value::asUInt64() have been added
+   to obtain the 64-bit integer value.
+
+   Json::Value::asLargestInt() and Json::Value::asLargestUInt() return
+   the integer as a LargestInt/LargestUInt respectively. Those functions
+   are typically used when implementing a writer.
+
+   The reader attempts to read a number as a 64-bit integer, and falls back
+   to reading a double if the number is not in the range of a 64-bit
+   integer.
+
+   Warning: Json::Value::asInt() and Json::Value::asUInt() now return
+   long long. This change breaks code that was passing the return value
+   to *printf() functions.
+
+   Support for 64-bit integers can be disabled by defining the macro
+   JSON_NO_INT64 (uncomment it in json/config.h for example), though
+   it should have no impact on existing usage.
+
+ - The type Json::ArrayIndex is used for indexes of a JSON value array. It
+   is an unsigned int (typically 32 bits).
+
+ - An array index can be passed as an int to operator[], allowing use of literals:
+      Json::Value array;
+      array.append( 1234 );
+      int value = array[0].asInt();  // did not compile previously
+
+ - Added float Json::Value::asFloat() to obtain a floating point value as a
+   float (avoids the loss-of-precision warning caused by using asDouble()
+   to initialize a float).
+
+* Reader
+
+ - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages().
+   Bug #3023708 (Formatted has 2 't'). The old member function is deprecated
+   but still present for backward compatibility.
+
+* Tests
+
+ - Added a test to ensure that the escape sequence "\/" is correctly handled
+   by the parser.
+
+* Bug fixes
+
+ - Bug #3139677: the JSON document [1 2 3] was incorrectly parsed as [1, 3].
+   The error is now correctly detected.
+
+ - Bug #3139678: stack buffer overflow when parsing a double with a
+   length of 32 characters.
+
+ - Fixed the Value::operator<= implementation (it had the semantics of
+   operator>=). Found when adding unit tests for the comparison operators.
+
+ - Value::compare() is now const and has an actual implementation with
+   unit tests.
+
+* License
+
+ - See file LICENSE for details. Basically JsonCpp is now licensed under the
+   MIT license, or public domain if desired and recognized in your jurisdiction.
+   Thanks to Stephan G.
+   Beal [http://wanderinghorse.net/home/stephan/]) who helped figure out
+   the solution to the public domain issue.
diff --git a/tags/jsoncpp/0.6.0-rc2/README.txt b/tags/jsoncpp/0.6.0-rc2/README.txt
new file mode 100644
index 0000000..51a098a
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc2/README.txt
@@ -0,0 +1,172 @@
+* Introduction:
+  =============
+
+JSON (JavaScript Object Notation) is a lightweight data-interchange format.
+It can represent integers, real numbers, strings, an ordered sequence of
+values, and a collection of name/value pairs.
+
+JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate JSON
+values and to handle serialization and deserialization to and from strings.
+
+It can also preserve existing comments across the deserialization/serialization
+steps, making it a convenient format for storing user input files.
+
+Parsing is user friendly and provides precise error reports.
+
+
+* Building/Testing:
+  =================
+
+JsonCpp uses SCons (http://www.scons.org) as a build system. SCons requires
+Python to be installed (http://www.python.org).
+
+You can download the scons-local distribution from the following URL:
+http://sourceforge.net/projects/scons/files/scons-local/1.2.0/
+
+Unzip it in the directory where you found this README file. scons.py should be
+at the same level as README.
+
+python scons.py platform=PLTFRM [TARGET]
+where PLTFRM may be one of:
+    suncc      Sun C++ (Solaris)
+    vacpp      Visual Age C++ (AIX)
+    mingw
+    msvc6      Microsoft Visual Studio 6 service pack 5-6
+    msvc70     Microsoft Visual Studio 2002
+    msvc71     Microsoft Visual Studio 2003
+    msvc80     Microsoft Visual Studio 2005
+    msvc90     Microsoft Visual Studio 2008
+    linux-gcc  GNU C++ (Linux, also reported to work for Mac OS X)
+
+Note: if you are building with Microsoft Visual Studio 2008, you need to
+set up the environment by running vcvars32.bat (e.g. the MSVC 2008 command
+prompt) before running scons.
+
+Adding a platform is fairly simple: you need to change the SConstruct file
+to do so.
+
+and TARGET may be:
+    check: build the library and run the unit tests.
+
+
+* Running the tests manually:
+  ===========================
+
+Note that the tests can be run by scons using the 'check' target (see above).
+
+You need to run the tests manually only if you are troubleshooting an issue.
+
+In the instructions below, replace "path to jsontest.exe" with the path
+of the 'jsontest' executable that was compiled on your platform.
+
+cd test
+# This will run the Reader/Writer tests
+python runjsontests.py "path to jsontest.exe"
+
+# This will run the Reader/Writer tests, using the JSONChecker test suite
+# (http://www.json.org/JSON_checker/).
+# Note: not all tests pass: JsonCpp is too lenient (for example,
+# it allows an integer to start with '0'). The goal is to improve
+# strict mode parsing to get all tests to pass.
+python runjsontests.py --with-json-checker "path to jsontest.exe"
+
+# This will run the unit tests (mostly Value)
+python rununittests.py "path to test_lib_json.exe"
+
+You can run the tests using valgrind:
+python rununittests.py --valgrind "path to test_lib_json.exe"
+
+
+* Building the documentation:
+  ===========================
+
+Run the Python script doxybuild.py from the top directory:
+
+python doxybuild.py --open --with-dot
+
+See doxybuild.py --help for options.
+
+Note that the documentation is also available for download as a tarball.
+The documentation of the latest release is available online at:
+http://jsoncpp.sourceforge.net/
+
+* Generating amalgamated source and header
+  ========================================
+
+JsonCpp is provided with a script to generate a single header and a single
+source file to ease inclusion in an existing project.
+
+The amalgamated source can be generated at any time by running the following
+command from the top directory (requires Python 2.6):
+
+python amalgamate.py
+
+It is possible to specify the header name. See the -h option for details.
+By default, the following files are generated:
+- dist/jsoncpp.cpp: source file that needs to be added to your project.
+- dist/json/json.h: corresponding header file for use in your project. It is
+  equivalent to including json/json.h in a non-amalgamated source. This header
+  only depends on standard headers.
+- dist/json/json-forwards.h: header that provides forward declarations of all
+  JsonCpp types. This is typically what should be included in headers to
+  speed up compilation.
+
+The amalgamated sources are generated by concatenating the JsonCpp sources in
+the correct order and defining the macro JSON_IS_AMALGAMATION to prevent
+inclusion of other headers.
+
+* Using json-cpp in your project:
+  ===============================
+
+include/ should be added to your compiler include path. jsoncpp headers
+should be included as follows:
+
+#include <json/json.h>
+
+
+* Adding a reader/writer test:
+  ============================
+
+To add a test, you need to create two files in test/data:
+- a TESTNAME.json file that contains the input document in JSON format.
+- a TESTNAME.expected file that contains a flattened representation of
+  the input document.
+
+TESTNAME.expected file format:
+- each line represents a JSON element of the element tree represented
+  by the input document.
+- each line has two parts: the path to access the element, separated from
+  the element value by '='. Array and object values are always empty
+  (i.e. represented by either [] or {}).
+- element path: '.' represents the root element, and is used to separate
+  object members. [N] is used to specify the value of an array element
+  at index N.
+See test_complex_01.json and test_complex_01.expected to better understand
+element paths (see also the illustrative sketch after this file).
+
+
+* Understanding reader/writer test output:
+  ========================================
+
+When a test is run, output files are generated alongside the input test files.
+Below is a short description of the content of each file:
+
+- test_complex_01.json: input JSON document.
+- test_complex_01.expected: flattened JSON element tree used to check whether
+  parsing was correct.
+- test_complex_01.actual: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.json.
+- test_complex_01.rewrite: JSON document written by jsontest.exe using the
+  Json::Value parsed from test_complex_01.json and serialized using
+  Json::StyledWriter.
+- test_complex_01.actual-rewrite: flattened JSON element tree produced by
+  jsontest.exe from reading test_complex_01.rewrite.
+- test_complex_01.process-output: jsontest.exe output, typically useful for
+  understanding parsing errors.
+
+* License
+  =======
+
+See the file LICENSE for details. Basically JsonCpp is licensed under the
+MIT license, or public domain if desired and recognized in your jurisdiction.
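[Illustrative sketch, not part of the patch above.] As a rough illustration of the usage
described in the README ("Using json-cpp in your project") and of the 0.6.0 additions noted
in NEWS.txt (64-bit accessors, int-literal array indexing, asFloat(), the renamed
getFormattedErrorMessages()), the following minimal program parses a small document and
re-serializes it. The sample JSON text and variable names are invented for the example, and
it assumes the non-amalgamated layout with include/ on the compiler include path.

// Minimal JsonCpp usage sketch (illustrative; sample data is hypothetical).
#include <json/json.h>
#include <iostream>
#include <string>

int main()
{
   const std::string doc =
      "{ \"name\": \"example\", \"big\": 123456789012345, "
      "\"ratio\": 0.25, \"items\": [1, 2, 3] }";

   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( doc, root ) )
   {
      // getFormattedErrorMessages() is the corrected spelling mentioned in NEWS.txt.
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }

   std::string name = root["name"].asString();
   Json::Int64 big  = root["big"].asInt64();    // 64-bit accessor added in 0.6.0
   float ratio      = root["ratio"].asFloat();  // asFloat() added in 0.6.0
   int first        = root["items"][0].asInt(); // int literal index now accepted

   Json::Value copy = root;
   copy["items"].append( 4 );                   // grow the array

   Json::StyledWriter writer;
   std::cout << name << " " << big << " " << ratio << " " << first << "\n"
             << writer.write( copy ) << std::endl;
   return 0;
}

Built against lib_json as described in the README, this should print the extracted fields
followed by the styled re-serialization of the modified document.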
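The README section "Adding a reader/writer test" above describes the flattened
TESTNAME.expected format. The sketch below is a simplified approximation of that flattening
written against the public Value API; it is not the actual jsontest implementation (the real
tool lives in src/jsontestrunner/main.cpp), and the sample document is invented, but it shows
how the '.'-separated member paths and '[N]' array indexes are formed.

#include <json/json.h>
#include <iostream>
#include <sstream>
#include <string>

// Print one "path=value" line per element, roughly in the spirit of the
// TESTNAME.expected files: '.' for the root and member separation, "[N]" for
// array indexes, "{}" / "[]" as the placeholder value for objects and arrays.
static void printTree( const std::string &path, const Json::Value &value )
{
   if ( value.isObject() )
   {
      std::cout << path << "={}\n";
      Json::Value::Members members = value.getMemberNames();
      for ( Json::Value::Members::const_iterator it = members.begin();
            it != members.end(); ++it )
      {
         const std::string child = (path == "." ? path : path + ".") + *it;
         printTree( child, value[*it] );
      }
   }
   else if ( value.isArray() )
   {
      std::cout << path << "=[]\n";
      for ( Json::ArrayIndex index = 0; index < value.size(); ++index )
      {
         std::ostringstream child;
         child << path << "[" << index << "]";
         printTree( child.str(), value[index] );
      }
   }
   else
   {
      Json::FastWriter writer;
      std::cout << path << "=" << writer.write( value ); // write() appends '\n'
   }
}

int main()
{
   const std::string doc =
      "{ \"count\": 1234, \"name\": { \"first\": \"Ada\" }, \"tags\": [\"x\", \"y\"] }";
   Json::Value root;
   Json::Reader reader;
   if ( !reader.parse( doc, root ) )
      return 1;
   printTree( ".", root );
   return 0;
}

For this hypothetical input it would emit lines such as ".count=1234", ".name.first="Ada""
and ".tags[0]="x"", which is the shape of tree the .expected files encode.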
diff --git a/tags/jsoncpp/0.6.0-rc2/SConstruct b/tags/jsoncpp/0.6.0-rc2/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. + import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! 
+ # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. + if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. 
+env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( 
os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.6.0-rc2/amalgamate.py b/tags/jsoncpp/0.6.0-rc2/amalgamate.py new file mode 100644 index 0000000..1476a5f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/amalgamate.py @@ -0,0 +1,147 @@ +"""Amalgate json-cpp library sources into a single source and header file. + +Requires Python 2.6 + +Example of invocation (must be invoked from json-cpp top directory): +python amalgate.py +""" +import os +import os.path +import sys + +class AmalgamationFile: + def __init__( self, top_dir ): + self.top_dir = top_dir + self.blocks = [] + + def add_text( self, text ): + if not text.endswith( '\n' ): + text += '\n' + self.blocks.append( text ) + + def add_file( self, relative_input_path, wrap_in_comment=False ): + def add_marker( prefix ): + self.add_text( '' ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '' ) + add_marker( 'Beginning' ) + f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) + content = f.read() + if wrap_in_comment: + content = '/*\n' + content + '\n*/' + self.add_text( content ) + f.close() + add_marker( 'End' ) + self.add_text( '\n\n\n\n' ) + + def get_value( self ): + return ''.join( self.blocks ).replace('\r\n','\n') + + def write_to( self, output_path ): + output_dir = os.path.dirname( output_path ) + if output_dir and not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + f = open( output_path, 'wb' ) + f.write( self.get_value() ) + f.close() + +def amalgamate_source( source_top_dir=None, + target_source_path=None, + header_include_path=None ): + """Produces amalgated source. + Parameters: + source_top_dir: top-directory + target_source_path: output .cpp path + header_include_path: generated header path relative to target_source_path. + """ + print 'Amalgating header...' + header = AmalgamationFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' 
) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_file( 'include/json/features.h' ) + header.add_file( 'include/json/value.h' ) + header.add_file( 'include/json/reader.h' ) + header.add_file( 'include/json/writer.h' ) + header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) + + target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) + print 'Writing amalgated header to %r' % target_header_path + header.write_to( target_header_path ) + + base, ext = os.path.splitext( header_include_path ) + forward_header_include_path = base + '-forwards' + ext + print 'Amalgating forward header...' + header = AmalgamationFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) + header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + + target_forward_header_path = os.path.join( os.path.dirname(target_source_path), + forward_header_include_path ) + print 'Writing amalgated forward header to %r' % target_forward_header_path + header.write_to( target_forward_header_path ) + + print 'Amalgating source...' + source = AmalgamationFile( source_top_dir ) + source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' ) + source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + source.add_file( 'LICENSE', wrap_in_comment=True ) + source.add_text( '' ) + source.add_text( '#include <%s>' % header_include_path ) + source.add_text( '' ) + source.add_file( 'src/lib_json\json_tool.h' ) + source.add_file( 'src/lib_json\json_reader.cpp' ) + source.add_file( 'src/lib_json\json_batchallocator.h' ) + source.add_file( 'src/lib_json\json_valueiterator.inl' ) + source.add_file( 'src/lib_json\json_value.cpp' ) + source.add_file( 'src/lib_json\json_writer.cpp' ) + + print 'Writing amalgated source to %r' % target_source_path + source.write_to( target_source_path ) + +def main(): + usage = """%prog [options] +Generate a single amalgated source and header file from the sources. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp', + help="""Output .cpp source path. [Default: %default]""") + parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h', + help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") + parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(), + help="""Source top-directory. 
[Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + msg = amalgamate_source( source_top_dir=options.top_dir, + target_source_path=options.target_source_path, + header_include_path=options.header_include_path ) + if msg: + sys.stderr.write( msg + '\n' ) + sys.exit( 1 ) + else: + print 'Source succesfully amalagated' + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', 
+## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc2/doc/footer.html b/tags/jsoncpp/0.6.0-rc2/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.6.0-rc2/doc/header.html b/tags/jsoncpp/0.6.0-rc2/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
diff --git a/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox new file mode 100644 index 0000000..97cc108 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox @@ -0,0 +1,126 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space": true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- attach C and C++ style comments to element during parsing +- rewrite JSON document preserving original comments + +Notes: Comments used to be supported in JSON but where removed for +portability (C like comments are not supported in Python). Since +comments are useful in configuration/input file, this feature was +preserved. + +\section _example Code example + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormattedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To checkout the source, see the following +instructions. + +\section _news What's New? 
+The description of latest changes can be found in +NEWS.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest NEWS.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +See file LICENSE in the top-directory of the project. + +Basically JsonCpp is licensed under MIT license, or public domain if desired +and recognized in your jurisdiction. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/0.6.0-rc2/doc/readme.txt b/tags/jsoncpp/0.6.0-rc2/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox new file mode 100644 index 0000000..e6fc17a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox @@ -0,0 +1,37 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_writer Writter control + Provides more control to determine how specific items are serialized when JSON allow choice: + - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". + - Optionally allow escaping of "/" using "\/". + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - Stream based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking + \section testing Testing + - Adds more tests for unicode parsing (e.g. including surrogate and error detection). +*/ diff --git a/tags/jsoncpp/0.6.0-rc2/doxybuild.py b/tags/jsoncpp/0.6.0-rc2/doxybuild.py new file mode 100644 index 0000000..03ad68d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doxybuild.py @@ -0,0 +1,169 @@ +"""Script to generate doxygen documentation. 
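+
+Typical invocation, run from the project top directory (using options that are
+defined in main() below; shown here only as an illustrative example):
+
+    python doxybuild.py --with-dot --open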
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/config.h b/tags/jsoncpp/0.6.0-rc2/include/json/config.h new file mode 100644 index 0000000..7609d45 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGAMATION + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/features.h b/tags/jsoncpp/0.6.0-rc2/include/json/features.h new file mode 100644 index 0000000..4353278 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h new file mode 100644 index 0000000..ab863da --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/json.h b/tags/jsoncpp/0.6.0-rc2/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/reader.h b/tags/jsoncpp/0.6.0-rc2/include/json/reader.h new file mode 100644 index 0000000..0a324df --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/reader.h @@ -0,0 +1,214 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
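+ *
+ * A minimal error-handling sketch (illustrative only; the document string and
+ * variable names are hypothetical, not part of this header):
+ * \code
+ * std::string doc = "{ \"encoding\" : \"UTF-8\" ";  // deliberately malformed so parse() fails
+ * Json::Reader reader;
+ * Json::Value root;
+ * if ( !reader.parse( doc, root ) )
+ *    std::cout << reader.getFormattedErrorMessages();
+ * \endcode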
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/value.h b/tags/jsoncpp/0.6.0-rc2/include/json/value.h new file mode 100644 index 0000000..32e3455 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
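+ *
+ * A minimal usage sketch (illustrative only; the member names "encoding" and
+ * "length" are hypothetical examples, not part of the API):
+ * \code
+ * Json::Value obj( Json::objectValue );
+ * obj["encoding"] = "UTF-8";   // creates the member on first access
+ * obj["length"] = 3;           // implicit conversion from int
+ * std::string enc = obj.get( "encoding", "ASCII" ).asString();  // default if missing
+ * Json::Value::Members names = obj.getMemberNames();
+ * for ( unsigned int i = 0; i < names.size(); ++i )
+ *    std::cout << names[i] << std::endl;   // prints each member name
+ * \endcode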
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ) const; + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
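+      // Worked example of the look-up formula documented above (illustrative,
+      // with itemsPerPage == 8): itemIndex 19 resolves to pageIndex 19 / 8 = 2
+      // and in-page slot 19 % 8 = 3, i.e. pages_[2][3].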
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/writer.h b/tags/jsoncpp/0.6.0-rc2/include/json/writer.h new file mode 100644 index 0000000..4789363 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
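+ *
+ * A minimal usage sketch (the member names below are purely illustrative):
+ * \code
+ *   Json::Value root;
+ *   root["encoding"] = "UTF-8";
+ *   root["plug-ins"].append( "python" );
+ *   Json::StyledWriter writer;
+ *   std::string document = writer.write( root ); // indented, one member per line
+ * \endcode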
+ * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
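+       * A sketch of the intended call pattern (the file name is illustrative and
+       * the caller is assumed to have included <fstream>):
+       * \code
+       *   Json::Value root;
+       *   root["encoding"] = "UTF-8";
+       *   Json::StyledStreamWriter writer( "  " ); // two-space indentation
+       *   std::ofstream out( "settings.json" );
+       *   writer.write( out, root );
+       * \endcode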
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc2/makerelease.py b/tags/jsoncpp/0.6.0-rc2/makerelease.py new file mode 100644 index 0000000..6b8eec3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makerelease.py @@ -0,0 +1,380 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgamate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
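+    # Visual Studio solution/project files are normalized to CRLF line endings
+    # below; every other distributed source file is normalized to LF.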
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir + print 'Generating amalgamation source tarball to', amalgamation_tarball_path + amalgamation_dir = 'dist/amalgamation' + amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) + amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version + tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], + amalgamation_dir, prefix_dir=amalgamation_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + 
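+    # Each requested platform compiles the freshly unpacked tarball with the
+    # bundled scons-local; if any platform fails, the release tag is removed
+    # again and the script aborts.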
print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
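+      // The free list is threaded through the released objects themselves: the
+      // first sizeof(AllocatedType *) bytes of a released object hold the pointer
+      // to the next free entry (hence the size assertion in the constructor).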
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..8bb0304 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp @@ -0,0 +1,880 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
+ std::string doc; + std::getline(sin, doc, (char)EOF); + return parse( doc, root, collectComments ); +} + +bool +Reader::parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments ) +{ + if ( !features_.allowComments_ ) + { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while ( !nodes_.empty() ) + nodes_.pop(); + nodes_.push( &root ); + + bool successful = readValue(); + Token token; + skipCommentTokens( token ); + if ( collectComments_ && !commentsBefore_.empty() ) + root.setComment( commentsBefore_, commentAfter ); + if ( features_.strictRoot_ ) + { + if ( !root.isArray() && !root.isObject() ) + { + // Set error location to start of doc, ideally should be first token found in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( "A valid JSON document must be either an array or an object value.", + token ); + return false; + } + } + return successful; +} + + +bool +Reader::readValue() +{ + Token token; + skipCommentTokens( token ); + bool successful = true; + + if ( collectComments_ && !commentsBefore_.empty() ) + { + currentValue().setComment( commentsBefore_, commentBefore ); + commentsBefore_ = ""; + } + + + switch ( token.type_ ) + { + case tokenObjectBegin: + successful = readObject( token ); + break; + case tokenArrayBegin: + successful = readArray( token ); + break; + case tokenNumber: + successful = decodeNumber( token ); + break; + case tokenString: + successful = decodeString( token ); + break; + case tokenTrue: + currentValue() = true; + break; + case tokenFalse: + currentValue() = false; + break; + case tokenNull: + currentValue() = Value(); + break; + default: + return addError( "Syntax error: value, object or array expected.", token ); + } + + if ( collectComments_ ) + { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + return successful; +} + + +void +Reader::skipCommentTokens( Token &token ) +{ + if ( features_.allowComments_ ) + { + do + { + readToken( token ); + } + while ( token.type_ == tokenComment ); + } + else + { + readToken( token ); + } +} + + +bool +Reader::expectToken( TokenType type, Token &token, const char *message ) +{ + readToken( token ); + if ( token.type_ != type ) + return addError( message, token ); + return true; +} + + +bool +Reader::readToken( Token &token ) +{ + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch ( c ) + { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match( "rue", 3 ); + break; + case 'f': + token.type_ = tokenFalse; + ok = match( "alse", 4 ); + break; + case 'n': + token.type_ = tokenNull; + ok = match( "ull", 3 ); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = 
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &/*tokenStart*/ ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd 
); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &/*tokenStart*/ ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + for (;;) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
+ if ( current != token.end_ || digit > lastDigitThreshold ) + { + return decodeDouble( token ); + } + } + value = value * 10 + digit; + } + if ( isNegative ) + currentValue() = -Value::LargestInt( value ); + else if ( value <= Value::LargestUInt(Value::maxInt) ) + currentValue() = Value::LargestInt( value ); + else + currentValue() = value; + return true; +} + + +bool +Reader::decodeDouble( Token &token ) +{ + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + if ( length <= bufferSize ) + { + Char buffer[bufferSize+1]; + memcpy( buffer, token.start_, length ); + buffer[length] = 0; + count = sscanf( buffer, "%lf", &value ); + } + else + { + std::string buffer( token.start_, token.end_ ); + count = sscanf( buffer.c_str(), "%lf", &value ); + } + + if ( count != 1 ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + currentValue() = value; + return true; +} + + +bool +Reader::decodeString( Token &token ) +{ + std::string decoded; + if ( !decodeString( token, decoded ) ) + return false; + currentValue() = decoded; + return true; +} + + +bool +Reader::decodeString( Token &token, std::string &decoded ) +{ + decoded.reserve( token.end_ - token.start_ - 2 ); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while ( current != end ) + { + Char c = *current++; + if ( c == '"' ) + break; + else if ( c == '\\' ) + { + if ( current == end ) + return addError( "Empty escape sequence in string", token, current ); + Char escape = *current++; + switch ( escape ) + { + case '"': decoded += '"'; break; + case '/': decoded += '/'; break; + case '\\': decoded += '\\'; break; + case 'b': decoded += '\b'; break; + case 'f': decoded += '\f'; break; + case 'n': decoded += '\n'; break; + case 'r': decoded += '\r'; break; + case 't': decoded += '\t'; break; + case 'u': + { + unsigned int unicode; + if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) + return false; + decoded += codePointToUTF8(unicode); + } + break; + default: + return addError( "Bad escape sequence in string", token, current ); + } + } + else + { + decoded += c; + } + } + return true; +} + +bool +Reader::decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + + if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) + { + // surrogate pairs + if (end - current < 6) + return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++)== 'u') + { + if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) + { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } + else + return false; + } + else + return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); + } + return true; +} + +bool +Reader::decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ) +{ + if ( end - current < 4 ) + return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); + unicode = 0; + for ( int index =0; index < 4; ++index ) + { + Char c = *current++; + unicode *= 16; + if ( c >= '0' && c <= '9' ) + unicode += c - '0'; + else if ( c >= 'a' && c <= 'f' ) + unicode += c - 'a' + 10; + else if ( c >= 'A' && 
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + for (;;) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... 
+ * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specify the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( LargestUInt value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp new file mode 100644 index 0000000..ff98f63 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp @@ -0,0 +1,1829 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +# include +# include +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) JSON_FAIL_MESSAGE( message ) + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + + +/// Unknown size marker +static const unsigned int unknown = (unsigned)-1; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + +} // namespace Json + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGAMATION) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) const +{ + if ( *this < other ) + return -1; + if ( *this > other ) + return 1; + return 0; +} + + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other < *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return 
other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? 
*value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? 
defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void 
+Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + 
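// ---------------------------------------------------------------------------
// Illustrative usage sketch for the Json::Value API implemented above
// (operator[], append(), get(), isMember(), asInt()/asString(),
// toStyledString()). This is a minimal, standalone example and is not part of
// the patched sources; the header name <json/json.h> and the sample member
// names are assumptions made for the sketch only.
// ---------------------------------------------------------------------------
// #include <json/json.h>   // assumed public header for this library
// #include <iostream>
//
// void valueUsageSketch()
// {
//    Json::Value root( Json::objectValue );
//    root["name"] = "jsoncpp";          // operator[] creates missing members on demand
//    root["version"]["major"] = 0;      // nested objects are created the same way
//    root["tags"].append( "json" );     // append() turns a null value into an array
//
//    // get() returns the supplied default when the member is absent,
//    // without inserting it (unlike operator[]).
//    int major = root["version"].get( "major", -1 ).asInt();
//
//    if ( root.isMember( "name" ) )
//       std::cout << root["name"].asString() << " " << major << "\n";
//
//    // toStyledString() formats the whole tree via StyledWriter.
//    std::cout << root.toStyledString();
// }
// ---------------------------------------------------------------------------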
+ +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
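        // (Note: in this revision the error branches in resolve() are
        // placeholder comments only; resolution keeps walking the path and
        // the node finally returned may be Value::null.)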
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( 
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..1bda183 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+ // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); + result += oss.str(); + } + else + { + result += *c; + } + break; + } + } + result += "\""; + return result; +} + +// Class Writer +// ////////////////////////////////////////////////////////////////// +Writer::~Writer() +{ +} + + +// Class FastWriter +// ////////////////////////////////////////////////////////////////// + +FastWriter::FastWriter() + : yamlCompatiblityEnabled_( false ) +{ +} + + +void +FastWriter::enableYAMLCompatibility() +{ + yamlCompatiblityEnabled_ = true; +} + + +std::string +FastWriter::write( const Value &root ) +{ + document_ = ""; + writeValue( root ); + document_ += "\n"; + return document_; +} + + +void +FastWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + document_ += "null"; + break; + case intValue: + document_ += valueToString( value.asLargestInt() ); + break; + case uintValue: + document_ += valueToString( value.asLargestUInt() ); + break; + case realValue: + document_ += valueToString( value.asDouble() ); + break; + case stringValue: + document_ += valueToQuotedString( value.asCString() ); + break; + case booleanValue: + document_ += valueToString( value.asBool() ); + break; + case arrayValue: + { + document_ += "["; + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ","; + writeValue( value[index] ); + } + document_ += "]"; + } + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + document_ += "{"; + for ( Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + if ( it != members.begin() ) + document_ += ","; + document_ += valueToQuotedString( name.c_str() ); + document_ += yamlCompatiblityEnabled_ ? 
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + for (;;) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + for (;;) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+         lastNode->failure_ = &( failures_.back() );
+      }
+      ++nestingLevel;
+   }
+
+   // Adds the failed assertion
+   addFailureInfo( file, line, expr, nestingLevel );
+   messageTarget_ = &( failures_.back() );
+   return *this;
+}
+
+
+void
+TestResult::addFailureInfo( const char *file, unsigned int line,
+                            const char *expr, unsigned int nestingLevel )
+{
+   Failure failure;
+   failure.file_ = file;
+   failure.line_ = line;
+   if ( expr )
+   {
+      failure.expr_ = expr;
+   }
+   failure.nestingLevel_ = nestingLevel;
+   failures_.push_back( failure );
+}
+
+
+TestResult &
+TestResult::popPredicateContext()
+{
+   PredicateContext *lastNode = &rootPredicateNode_;
+   while ( lastNode->next_ != 0  &&  lastNode->next_->next_ != 0 )
+   {
+      lastNode = lastNode->next_;
+   }
+   // Set message target to popped failure
+   PredicateContext *tail = lastNode->next_;
+   if ( tail != 0  &&  tail->failure_ != 0 )
+   {
+      messageTarget_ = tail->failure_;
+   }
+   // Remove tail from list
+   predicateStackTail_ = lastNode;
+   lastNode->next_ = 0;
+   return *this;
+}
+
+
+bool
+TestResult::failed() const
+{
+   return !failures_.empty();
+}
+
+
+unsigned int
+TestResult::getAssertionNestingLevel() const
+{
+   unsigned int level = 0;
+   const PredicateContext *lastNode = &rootPredicateNode_;
+   while ( lastNode->next_ != 0 )
+   {
+      lastNode = lastNode->next_;
+      ++level;
+   }
+   return level;
+}
+
+
+void
+TestResult::printFailure( bool printTestName ) const
+{
+   if ( failures_.empty() )
+   {
+      return;
+   }
+
+   if ( printTestName )
+   {
+      printf( "* Detail of %s test failure:\n", name_.c_str() );
+   }
+
+   // Print in reverse to display the callstack in the right order
+   Failures::const_iterator itEnd = failures_.end();
+   for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it )
+   {
+      const Failure &failure = *it;
+      std::string indent( failure.nestingLevel_ * 2, ' ' );
+      if ( failure.file_ )
+      {
+         printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ );
+      }
+      if ( !failure.expr_.empty() )
+      {
+         printf( "%s\n", failure.expr_.c_str() );
+      }
+      else if ( failure.file_ )
+      {
+         printf( "\n" );
+      }
+      if ( !failure.message_.empty() )
+      {
+         std::string reindented = indentText( failure.message_, indent + "  " );
+         printf( "%s\n", reindented.c_str() );
+      }
+   }
+}
+
+
+std::string
+TestResult::indentText( const std::string &text,
+                        const std::string &indent )
+{
+   std::string reindented;
+   std::string::size_type lastIndex = 0;
+   while ( lastIndex < text.size() )
+   {
+      std::string::size_type nextIndex = text.find( '\n', lastIndex );
+      if ( nextIndex == std::string::npos )
+      {
+         nextIndex = text.size() - 1;
+      }
+      reindented += indent;
+      reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 );
+      lastIndex = nextIndex + 1;
+   }
+   return reindented;
+}
+
+
+TestResult &
+TestResult::addToLastFailure( const std::string &message )
+{
+   if ( messageTarget_ != 0 )
+   {
+      messageTarget_->message_ += message;
+   }
+   return *this;
+}
+
+
+TestResult &
+TestResult::operator << ( bool value )
+{
+   return addToLastFailure( value ? "true" : "false" );
+}
+
+
+TestResult &
+TestResult::operator << ( int value )
+{
+   char buffer[32];
+   sprintf( buffer, "%d", value );
+   return addToLastFailure( buffer );
+}
+
+
+TestResult &
+TestResult::operator << ( unsigned int value )
+{
+   char buffer[32];
+   sprintf( buffer, "%u", value );
+   return addToLastFailure( buffer );
+}
+
+
+TestResult &
+TestResult::operator << ( double value )
+{
+   char buffer[32];
+   sprintf( buffer, "%16g", value );
+   return addToLastFailure( buffer );
+}
+
+
+TestResult &
+TestResult::operator << ( const char *value )
+{
+   return addToLastFailure( value ? value : "" );
+}
+
+
+TestResult &
+TestResult::operator << ( const std::string &value )
+{
+   return addToLastFailure( value );
+}
+
+
+
+// class TestCase
+// //////////////////////////////////////////////////////////////////
+
+TestCase::TestCase()
+   : result_( 0 )
+{
+}
+
+
+TestCase::~TestCase()
+{
+}
+
+
+void
+TestCase::run( TestResult &result )
+{
+   result_ = &result;
+   runTestCase();
+}
+
+
+
+// class Runner
+// //////////////////////////////////////////////////////////////////
+
+Runner::Runner()
+{
+}
+
+
+Runner &
+Runner::add( TestCaseFactory factory )
+{
+   tests_.push_back( factory );
+   return *this;
+}
+
+
+unsigned int
+Runner::testCount() const
+{
+   return static_cast<unsigned int>( tests_.size() );
+}
+
+
+std::string
+Runner::testNameAt( unsigned int index ) const
+{
+   TestCase *test = tests_[index]();
+   std::string name = test->testName();
+   delete test;
+   return name;
+}
+
+
+void
+Runner::runTestAt( unsigned int index, TestResult &result ) const
+{
+   TestCase *test = tests_[index]();
+   result.setTestName( test->testName() );
+   printf( "Testing %s: ", test->testName() );
+   fflush( stdout );
+#if JSON_USE_EXCEPTION
+   try
+   {
+#endif // if JSON_USE_EXCEPTION
+      test->run( result );
+#if JSON_USE_EXCEPTION
+   }
+   catch ( const std::exception &e )
+   {
+      result.addFailure( __FILE__, __LINE__,
+                         "Unexpected exception caught:" ) << e.what();
+   }
+#endif // if JSON_USE_EXCEPTION
+   delete test;
+   const char *status = result.failed() ? "FAILED" : "OK";
+   printf( "%s\n", status );
+   fflush( stdout );
+}
+
+
+bool
+Runner::runAllTest( bool printSummary ) const
+{
+   unsigned int count = testCount();
+   std::deque<TestResult> failures;
+   for ( unsigned int index = 0; index < count; ++index )
+   {
+      TestResult result;
+      runTestAt( index, result );
+      if ( result.failed() )
+      {
+         failures.push_back( result );
+      }
+   }
+
+   if ( failures.empty() )
+   {
+      if ( printSummary )
+      {
+         printf( "All %d tests passed\n", count );
+      }
+      return true;
+   }
+   else
+   {
+      for ( unsigned int index = 0; index < failures.size(); ++index )
+      {
+         TestResult &result = failures[index];
+         result.printFailure( count > 1 );
+      }
+
+      if ( printSummary )
+      {
+         unsigned int failedCount = static_cast<unsigned int>( failures.size() );
+         unsigned int passedCount = count - failedCount;
+         printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount );
+      }
+      return false;
+   }
+}
+
+
+bool
+Runner::testIndex( const std::string &testName,
+                   unsigned int &indexOut ) const
+{
+   unsigned int count = testCount();
+   for ( unsigned int index = 0; index < count; ++index )
+   {
+      if ( testNameAt(index) == testName )
+      {
+         indexOut = index;
+         return true;
+      }
+   }
+   return false;
+}
+
+
+void
+Runner::listTests() const
+{
+   unsigned int count = testCount();
+   for ( unsigned int index = 0; index < count; ++index )
+   {
+      printf( "%s\n", testNameAt( index ).c_str() );
+   }
+}
+
+
+int
+Runner::runCommandLine( int argc, const char *argv[] ) const
+{
+   typedef std::deque<std::string> TestNames;
+   Runner subrunner;
+   for ( int index = 1; index < argc; ++index )
+   {
+      std::string opt = argv[index];
+      if ( opt == "--list-tests" )
+      {
+         listTests();
+         return 0;
+      }
+      else if ( opt == "--test-auto" )
+      {
+         preventDialogOnCrash();
+      }
+      else if ( opt == "--test" )
+      {
+         ++index;
+         if ( index < argc )
+         {
+            unsigned int testNameIndex;
+            if ( testIndex( argv[index], testNameIndex ) )
+            {
+               subrunner.add( tests_[testNameIndex] );
+            }
+            else
+            {
+               fprintf( stderr, "Test '%s' does not exist!\n", argv[index] );
+               return 2;
+            }
+         }
+         else
+         {
+            printUsage( argv[0] );
+            return 2;
+         }
+      }
+      else
+      {
+         printUsage( argv[0] );
+         return 2;
+      }
+   }
+   bool succeeded;
+   if ( subrunner.testCount() > 0 )
+   {
+      succeeded = subrunner.runAllTest( subrunner.testCount() > 1 );
+   }
+   else
+   {
+      succeeded = runAllTest( true );
+   }
+   return succeeded ? 0 : 1;
+}
+
+
+#if defined(_MSC_VER)
+// Hook MSVCRT assertions to prevent dialog from appearing
+static int
+msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
+{
+   // The default CRT handling of an error or assertion is to display
+   // an error dialog to the user.
+   // Instead, when an error or an assertion occurs, we force the
+   // application to terminate using abort() after displaying
+   // the message on stderr.
+   if ( reportType == _CRT_ERROR  ||
+        reportType == _CRT_ASSERT )
+   {
+      // Calling abort() causes the report hook to be invoked again.
+      // The following is used to detect this case and let the
+      // error handler fall back on its default behaviour
+      // (displaying a warning message).
+      static volatile bool isAborting = false;
+      if ( isAborting )
+      {
+         return TRUE;
+      }
+      isAborting = true;
+
+      fprintf( stderr, "CRT Error/Assert:\n%s\n", message );
+      fflush( stderr );
+      abort();
+   }
+   // Let other report types (_CRT_WARNING) be handled as they would be by default
+   return FALSE;
+}
+#endif // if defined(_MSC_VER)
+
+
+void
+Runner::preventDialogOnCrash()
+{
+#if defined(_MSC_VER)
+   // Install a hook to prevent MSVCRT error and assertion from
+   // popping a dialog.
+   _CrtSetReportHook( &msvcrtSilentReportHook );
+#endif // if defined(_MSC_VER)
+
+   // @todo investigate this handler (for buffer overflow)
+   // _set_security_error_handler
+
+#if defined(_WIN32)
+   // Prevents the system from popping a dialog for debugging if the
+   // application fails due to invalid memory access.
+   SetErrorMode( SEM_FAILCRITICALERRORS
+                 | SEM_NOGPFAULTERRORBOX
+                 | SEM_NOOPENFILEERRORBOX );
+#endif // if defined(_WIN32)
+}
+
+void
+Runner::printUsage( const char *appName )
+{
+   printf(
+      "Usage: %s [options]\n"
+      "\n"
+      "If --test is not specified, then all the test cases are run.\n"
+      "\n"
+      "Valid options:\n"
+      "--list-tests: print the name of all test cases on the standard\n"
+      "              output and exit.\n"
+      "--test TESTNAME: executes the test case with the specified name.\n"
+      "                 May be repeated.\n"
+      "--test-auto: prevent dialog prompting for debugging on crash.\n"
+      , appName );
+}
+
+
+
+// Assertion functions
+// //////////////////////////////////////////////////////////////////
+
+TestResult &
+checkStringEqual( TestResult &result,
+                  const std::string &expected, const std::string &actual,
+                  const char *file, unsigned int line, const char *expr )
+{
+   if ( expected != actual )
+   {
+      result.addFailure( file, line, expr );
+      result << "Expected: '" << expected << "'\n";
+      result << "Actual  : '" << actual << "'";
+   }
+   return result;
+}
+
+
+} // namespace JsonTest
diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h
new file mode 100644
index 0000000..0d07238
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h
@@ -0,0 +1,259 @@
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSONTEST_H_INCLUDED
+# define JSONTEST_H_INCLUDED
+
+# include <json/config.h>
+# include <stdio.h>
+# include <deque>
+# include <string>
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// Mini Unit Testing framework
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+
+
+/** \brief Unit testing framework.
+ * \warning: all assertions are non-aborting; test case execution will continue
+ *           even if an assertion fails.
+ *           This constraint is for portability: the framework needs to compile
+ *           on Visual Studio 6 and must not require exception usage.
+ */
+namespace JsonTest {
+
+
+   class Failure
+   {
+   public:
+      const char *file_;
+      unsigned int line_;
+      std::string expr_;
+      std::string message_;
+      unsigned int nestingLevel_;
+   };
+
+
+   /// Context used to create the assertion callstack on failure.
+   /// Must be a POD to allow inline initialisation without stepping
+   /// into the debugger.
+   struct PredicateContext
+   {
+      typedef unsigned int Id;
+      Id id_;
+      const char *file_;
+      unsigned int line_;
+      const char *expr_;
+      PredicateContext *next_;
+      /// Related Failure, set when the PredicateContext is converted
+      /// into a Failure.
+      Failure *failure_;
+   };
+
+   class TestResult
+   {
+   public:
+      TestResult();
+
+      /// \internal Implementation detail for assertion macros
+      /// Not encapsulated to prevent step into when debugging failed assertions
+      /// Incremented by one on assertion predicate entry, decreased by one
+      /// by addPredicateContext().
+      PredicateContext::Id predicateId_;
+
+      /// \internal Implementation detail for predicate macros
+      PredicateContext *predicateStackTail_;
+
+      void setTestName( const std::string &name );
+
+      /// Adds an assertion failure.
+      TestResult &addFailure( const char *file, unsigned int line,
+                              const char *expr = 0 );
+
+      /// Removes the last PredicateContext added to the predicate stack
+      /// chained list.
+      /// Next messages will be targeted at the PredicateContext that was removed.
+      TestResult &popPredicateContext();
+
+      bool failed() const;
+
+      void printFailure( bool printTestName ) const;
+
+      TestResult &operator << ( bool value );
+      TestResult &operator << ( int value );
+      TestResult &operator << ( unsigned int value );
+      TestResult &operator << ( double value );
+      TestResult &operator << ( const char *value );
+      TestResult &operator << ( const std::string &value );
+
+   private:
+      TestResult &addToLastFailure( const std::string &message );
+      unsigned int getAssertionNestingLevel() const;
+      /// Adds a failure or a predicate context
+      void addFailureInfo( const char *file, unsigned int line,
+                           const char *expr, unsigned int nestingLevel );
+      static std::string indentText( const std::string &text,
+                                     const std::string &indent );
+
+      typedef std::deque<Failure> Failures;
+      Failures failures_;
+      std::string name_;
+      PredicateContext rootPredicateNode_;
+      PredicateContext::Id lastUsedPredicateId_;
+      /// Failure which is the target of the messages added using operator <<
+      Failure *messageTarget_;
+   };
+
+
+   class TestCase
+   {
+   public:
+      TestCase();
+
+      virtual ~TestCase();
+
+      void run( TestResult &result );
+
+      virtual const char *testName() const = 0;
+
+   protected:
+      TestResult *result_;
+
+   private:
+      virtual void runTestCase() = 0;
+   };
+
+   /// Function pointer type for TestCase factory
+   typedef TestCase *(*TestCaseFactory)();
+
+   class Runner
+   {
+   public:
+      Runner();
+
+      /// Adds a test to the suite
+      Runner &add( TestCaseFactory factory );
+
+      /// Runs the tests specified on the command-line.
+      /// If no command-line arguments are provided, runs all tests.
+      /// If --list-tests is provided, prints the list of all test cases.
+      /// If --test is provided, runs the test case with the specified name.
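+      ///
+      /// Illustrative invocations (a usage sketch; the executable name is taken
+      /// from the test_lib_json sconscript target and is an assumption here):
+      ///   test_lib_json --list-tests
+      ///   test_lib_json --test ValueTest/isInt --test ValueTest/compareArray
+      ///   test_lib_json --test-auto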
+      int runCommandLine( int argc, const char *argv[] ) const;
+
+      /// Runs all the test cases
+      bool runAllTest( bool printSummary ) const;
+
+      /// Returns the number of test cases in the suite
+      unsigned int testCount() const;
+
+      /// Returns the name of the test case at the specified index
+      std::string testNameAt( unsigned int index ) const;
+
+      /// Runs the test case at the specified index using the specified TestResult
+      void runTestAt( unsigned int index, TestResult &result ) const;
+
+      static void printUsage( const char *appName );
+
+   private: // prevents copy construction and assignment
+      Runner( const Runner &other );
+      Runner &operator =( const Runner &other );
+
+   private:
+      void listTests() const;
+      bool testIndex( const std::string &testName, unsigned int &index ) const;
+      static void preventDialogOnCrash();
+
+   private:
+      typedef std::deque<TestCaseFactory> Factories;
+      Factories tests_;
+   };
+
+   template<typename T>
+   TestResult &
+   checkEqual( TestResult &result, const T &expected, const T &actual,
+               const char *file, unsigned int line, const char *expr )
+   {
+      if ( expected != actual )
+      {
+         result.addFailure( file, line, expr );
+         result << "Expected: " << expected << "\n";
+         result << "Actual  : " << actual;
+      }
+      return result;
+   }
+
+   TestResult &
+   checkStringEqual( TestResult &result,
+                     const std::string &expected, const std::string &actual,
+                     const char *file, unsigned int line, const char *expr );
+
+} // namespace JsonTest
+
+
+/// \brief Asserts that the given expression is true.
+/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
+/// JSONTEST_ASSERT( x == y );
+#define JSONTEST_ASSERT( expr )                         \
+   if ( expr )                                          \
+   {                                                    \
+   }                                                    \
+   else                                                 \
+      result_->addFailure( __FILE__, __LINE__, #expr )
+
+/// \brief Asserts that the given predicate is true.
+/// The predicate may do other assertions and be a member function of the fixture.
+#define JSONTEST_ASSERT_PRED( expr )                                    \
+   {                                                                    \
+      JsonTest::PredicateContext _minitest_Context = {                  \
+         result_->predicateId_, __FILE__, __LINE__, #expr };            \
+      result_->predicateStackTail_->next_ = &_minitest_Context;         \
+      result_->predicateId_ += 1;                                       \
+      result_->predicateStackTail_ = &_minitest_Context;                \
+      (expr);                                                           \
+      result_->popPredicateContext();                                   \
+   }                                                                    \
+   *result_
+
+/// \brief Asserts that two values are equal.
+#define JSONTEST_ASSERT_EQUAL( expected, actual )          \
+   JsonTest::checkEqual( *result_, expected, actual,       \
+                         __FILE__, __LINE__,               \
+                         #expected " == " #actual )
+
+/// \brief Asserts that two values are equal.
+#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual )                    \
+   JsonTest::checkStringEqual( *result_,                                    \
+                               std::string(expected), std::string(actual),  \
+                               __FILE__, __LINE__,                          \
+                               #expected " == " #actual )
+
+/// \brief Begin a fixture test case.
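+/// A minimal usage sketch (illustrative only; "MyFixture" and "twoPlusTwo" are
+/// hypothetical names, not part of this test suite):
+///   struct MyFixture : JsonTest::TestCase { };
+///
+///   JSONTEST_FIXTURE( MyFixture, twoPlusTwo )
+///   {
+///      JSONTEST_ASSERT_EQUAL( 4, 2 + 2 );
+///   }
+///
+///   // in main(): JSONTEST_REGISTER_FIXTURE( runner, MyFixture, twoPlusTwo );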
+#define JSONTEST_FIXTURE( FixtureType, name )                 \
+   class Test##FixtureType##name : public FixtureType         \
+   {                                                          \
+   public:                                                    \
+      static JsonTest::TestCase *factory()                    \
+      {                                                       \
+         return new Test##FixtureType##name();                \
+      }                                                       \
+   public: /* overridden from TestCase */                     \
+      virtual const char *testName() const                    \
+      {                                                       \
+         return #FixtureType "/" #name;                       \
+      }                                                       \
+      virtual void runTestCase();                             \
+   };                                                         \
+                                                              \
+   void Test##FixtureType##name::runTestCase()
+
+#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
+   &Test##FixtureType##name::factory
+
+#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
+   (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
+
+#endif // ifndef JSONTEST_H_INCLUDED
diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp
new file mode 100644
index 0000000..3275219
--- /dev/null
+++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp
@@ -0,0 +1,430 @@
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#include <json/json.h>
+#include "jsontest.h"
+
+
+// TODO:
+// - Boolean values report that they are integral. They should not.
+// - Unsigned integers in the signed integer range are not considered valid integers. The range should be checked.
+
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// Json Library test cases
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+
+struct ValueTest : JsonTest::TestCase
+{
+   Json::Value null_;
+   Json::Value emptyArray_;
+   Json::Value emptyObject_;
+   Json::Value integer_;
+   Json::Value unsignedInteger_;
+   Json::Value smallUnsignedInteger_;
+   Json::Value real_;
+   Json::Value float_;
+   Json::Value array1_;
+   Json::Value object1_;
+   Json::Value emptyString_;
+   Json::Value string1_;
+   Json::Value string_;
+   Json::Value true_;
+   Json::Value false_;
+
+
+   ValueTest()
+      : emptyArray_( Json::arrayValue )
+      , emptyObject_( Json::objectValue )
+      , integer_( 123456789 )
+      , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) )
+      , unsignedInteger_( 34567890u )
+      , real_( 1234.56789 )
+      , float_( 0.00390625f )
+      , emptyString_( "" )
+      , string1_( "a" )
+      , string_( "sometext with space" )
+      , true_( true )
+      , false_( false )
+   {
+      array1_.append( 1234 );
+      object1_["id"] = 1234;
+   }
+
+   struct IsCheck
+   {
+      /// Initialize all checks to \c false by default.
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); + + void checkIsLess( const Json::Value &x, const Json::Value &y ); + + void checkIsEqual( const Json::Value &x, const Json::Value &y ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + 
JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + +JSONTEST_FIXTURE( ValueTest, compareNull ) +{ + JSONTEST_ASSERT_PRED( checkIsEqual( Json::Value(), Json::Value() ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareInt ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0, 10 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10, 10 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( -10, -10 ) ); + JSONTEST_ASSERT_PRED( checkIsLess( -10, 0 ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareUInt ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0u, 10u ) ); + JSONTEST_ASSERT_PRED( checkIsLess( 0u, Json::Value::maxUInt ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10u, 10u ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareDouble ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0.0, 10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10.0, 10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( -10.0, -10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsLess( -10.0, 0.0 ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareString ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( "", " " ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "", "a" ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "abcd", "zyui" ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "abc", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( "abcd", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( " ", " " ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "ABCD", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( "ABCD", "ABCD" ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareBoolean ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( false, true ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( false, false ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( true, true ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareArray ) +{ + // array compare size then content + Json::Value 
emptyArray(Json::arrayValue);
+   Json::Value l1aArray;
+   l1aArray.append( 0 );
+   Json::Value l1bArray;
+   l1bArray.append( 10 );
+   Json::Value l2aArray;
+   l2aArray.append( 0 );
+   l2aArray.append( 0 );
+   Json::Value l2bArray;
+   l2bArray.append( 0 );
+   l2bArray.append( 10 );
+   JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l1aArray ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l2aArray ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( l1aArray, l2aArray ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( l2aArray, l2bArray ) );
+   JSONTEST_ASSERT_PRED( checkIsEqual( emptyArray, Json::Value( emptyArray ) ) );
+   JSONTEST_ASSERT_PRED( checkIsEqual( l1aArray, Json::Value( l1aArray ) ) );
+   JSONTEST_ASSERT_PRED( checkIsEqual( l2bArray, Json::Value( l2bArray ) ) );
+}
+
+
+JSONTEST_FIXTURE( ValueTest, compareObject )
+{
+   // objects compare by size, then by content
+   Json::Value emptyObject(Json::objectValue);
+   Json::Value l1aObject;
+   l1aObject["key1"] = 0;
+   Json::Value l1bObject;
+   l1bObject["key1"] = 10;
+   Json::Value l2aObject;
+   l2aObject["key1"] = 0;
+   l2aObject["key2"] = 0;
+   JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l1aObject ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l2aObject ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( l1aObject, l2aObject ) );
+   JSONTEST_ASSERT_PRED( checkIsEqual( emptyObject, Json::Value( emptyObject ) ) );
+   JSONTEST_ASSERT_PRED( checkIsEqual( l1aObject, Json::Value( l1aObject ) ) );
+   JSONTEST_ASSERT_PRED( checkIsEqual( l2aObject, Json::Value( l2aObject ) ) );
+}
+
+
+JSONTEST_FIXTURE( ValueTest, compareType )
+{
+   // objects of different types are ordered according to their type
+   JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(), Json::Value(1) ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1), Json::Value(1u) ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1u), Json::Value(1.0) ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1.0), Json::Value("a") ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( Json::Value("a"), Json::Value(true) ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(true), Json::Value(Json::arrayValue) ) );
+   JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(Json::arrayValue), Json::Value(Json::objectValue) ) );
+}
+
+
+void
+ValueTest::checkIsLess( const Json::Value &x, const Json::Value &y )
+{
+   JSONTEST_ASSERT( x < y );
+   JSONTEST_ASSERT( y > x );
+   JSONTEST_ASSERT( x <= y );
+   JSONTEST_ASSERT( y >= x );
+   JSONTEST_ASSERT( !(x == y) );
+   JSONTEST_ASSERT( !(y == x) );
+   JSONTEST_ASSERT( !(x >= y) );
+   JSONTEST_ASSERT( !(y <= x) );
+   JSONTEST_ASSERT( !(x > y) );
+   JSONTEST_ASSERT( !(y < x) );
+   JSONTEST_ASSERT( x.compare( y ) < 0 );
+   JSONTEST_ASSERT( y.compare( x ) >= 0 );
+}
+
+
+void
+ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y )
+{
+   JSONTEST_ASSERT( x == y );
+   JSONTEST_ASSERT( y == x );
+   JSONTEST_ASSERT( x <= y );
+   JSONTEST_ASSERT( y <= x );
+   JSONTEST_ASSERT( x >= y );
+   JSONTEST_ASSERT( y >= x );
+   JSONTEST_ASSERT( !(x < y) );
+   JSONTEST_ASSERT( !(y < x) );
+   JSONTEST_ASSERT( !(x > y) );
+   JSONTEST_ASSERT( !(y > x) );
+   JSONTEST_ASSERT( x.compare( y ) == 0 );
+   JSONTEST_ASSERT( y.compare( x ) == 0 );
+}
+
+
+int main( int argc, const char *argv[] )
+{
+   JsonTest::Runner runner;
+   JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size );
+   JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject );
+   JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray );
+   JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool );
+   JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt );
+   JSONTEST_REGISTER_FIXTURE( runner, ValueTest,
isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareBoolean ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareType ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc2/test/cleantests.py b/tags/jsoncpp/0.6.0-rc2/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] 
+.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 
"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- 
/dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." 
+.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected 
b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 
+.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 
+.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 
+.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 
+.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 
+.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 
+.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 
+.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 
+.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 
+.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 
+.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json @@ -0,0 +1,2 @@ +[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766
,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,15
24,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git 
a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected 
b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json new file mode 100644 
index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ 
+.="¢" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json 
b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff 
--git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": 
false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + 
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/test/rununittests.py b/tags/jsoncpp/0.6.0-rc2/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' 
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/version b/tags/jsoncpp/0.6.0-rc2/version new file mode 100644 index 0000000..673adfb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/version @@ -0,0 +1 @@ +0.6.0-rc2 \ No newline at end of file From 94695289c13d419e201bd77b5a8a3873d95afe31 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 21:46:14 +0000 Subject: [PATCH 190/268] Overwriting previous tag git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@190 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc2/AUTHORS | 1 - tags/jsoncpp/0.6.0-rc2/LICENSE | 55 - tags/jsoncpp/0.6.0-rc2/NEWS.txt | 101 - tags/jsoncpp/0.6.0-rc2/README.txt | 172 -- tags/jsoncpp/0.6.0-rc2/SConstruct | 248 -- tags/jsoncpp/0.6.0-rc2/amalgamate.py | 147 -- tags/jsoncpp/0.6.0-rc2/devtools/__init__.py | 1 - tags/jsoncpp/0.6.0-rc2/devtools/antglob.py | 201 -- tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py | 63 - .../0.6.0-rc2/devtools/licenseupdater.py | 93 - tags/jsoncpp/0.6.0-rc2/devtools/tarball.py | 53 - tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in | 1534 ------------ tags/jsoncpp/0.6.0-rc2/doc/footer.html | 23 - tags/jsoncpp/0.6.0-rc2/doc/header.html | 24 - tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox | 126 - tags/jsoncpp/0.6.0-rc2/doc/readme.txt | 1 - tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox | 37 - tags/jsoncpp/0.6.0-rc2/doxybuild.py | 169 -- .../jsoncpp/0.6.0-rc2/include/json/autolink.h | 24 - tags/jsoncpp/0.6.0-rc2/include/json/config.h | 96 - .../jsoncpp/0.6.0-rc2/include/json/features.h | 49 - .../jsoncpp/0.6.0-rc2/include/json/forwards.h | 44 - tags/jsoncpp/0.6.0-rc2/include/json/json.h | 15 - tags/jsoncpp/0.6.0-rc2/include/json/reader.h | 214 -- tags/jsoncpp/0.6.0-rc2/include/json/value.h | 1103 --------- tags/jsoncpp/0.6.0-rc2/include/json/writer.h | 185 -- .../0.6.0-rc2/makefiles/vs71/jsoncpp.sln | 46 - .../0.6.0-rc2/makefiles/vs71/jsontest.vcproj | 119 - .../0.6.0-rc2/makefiles/vs71/lib_json.vcproj | 214 -- .../makefiles/vs71/test_lib_json.vcproj | 130 - tags/jsoncpp/0.6.0-rc2/makerelease.py | 380 --- .../jsoncpp/0.6.0-rc2/scons-tools/globtool.py | 53 - tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py | 179 -- .../0.6.0-rc2/scons-tools/substinfile.py | 79 - tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py | 82 - .../0.6.0-rc2/src/jsontestrunner/main.cpp | 269 --- .../0.6.0-rc2/src/jsontestrunner/sconscript | 9 - .../src/lib_json/json_batchallocator.h | 130 - .../src/lib_json/json_internalarray.inl | 456 ---- .../src/lib_json/json_internalmap.inl | 615 ----- .../0.6.0-rc2/src/lib_json/json_reader.cpp | 880 ------- .../0.6.0-rc2/src/lib_json/json_tool.h | 93 - .../0.6.0-rc2/src/lib_json/json_value.cpp | 1829 -------------- .../src/lib_json/json_valueiterator.inl | 299 --- .../0.6.0-rc2/src/lib_json/json_writer.cpp | 838 ------- .../jsoncpp/0.6.0-rc2/src/lib_json/sconscript | 8 - .../0.6.0-rc2/src/test_lib_json/jsontest.cpp | 608 ----- .../0.6.0-rc2/src/test_lib_json/jsontest.h | 259 -- .../0.6.0-rc2/src/test_lib_json/main.cpp | 430 ---- .../0.6.0-rc2/src/test_lib_json/sconscript | 10 - tags/jsoncpp/0.6.0-rc2/test/cleantests.py | 10 - .../test/data/fail_test_array_01.json | 1 - .../test/data/test_array_01.expected | 1 - .../0.6.0-rc2/test/data/test_array_01.json | 1 - .../test/data/test_array_02.expected | 2 - .../0.6.0-rc2/test/data/test_array_02.json | 1 - .../test/data/test_array_03.expected | 6 - .../0.6.0-rc2/test/data/test_array_03.json | 1 - .../test/data/test_array_04.expected | 5 - 
.../0.6.0-rc2/test/data/test_array_04.json | 1 - .../test/data/test_array_05.expected | 100 - .../0.6.0-rc2/test/data/test_array_05.json | 1 - .../test/data/test_array_06.expected | 5 - .../0.6.0-rc2/test/data/test_array_06.json | 4 - .../test/data/test_basic_01.expected | 1 - .../0.6.0-rc2/test/data/test_basic_01.json | 1 - .../test/data/test_basic_02.expected | 1 - .../0.6.0-rc2/test/data/test_basic_02.json | 1 - .../test/data/test_basic_03.expected | 3 - .../0.6.0-rc2/test/data/test_basic_03.json | 3 - .../test/data/test_basic_04.expected | 2 - .../0.6.0-rc2/test/data/test_basic_04.json | 2 - .../test/data/test_basic_05.expected | 2 - .../0.6.0-rc2/test/data/test_basic_05.json | 2 - .../test/data/test_basic_06.expected | 2 - .../0.6.0-rc2/test/data/test_basic_06.json | 2 - .../test/data/test_basic_07.expected | 2 - .../0.6.0-rc2/test/data/test_basic_07.json | 2 - .../test/data/test_basic_08.expected | 2 - .../0.6.0-rc2/test/data/test_basic_08.json | 3 - .../test/data/test_basic_09.expected | 2 - .../0.6.0-rc2/test/data/test_basic_09.json | 4 - .../test/data/test_comment_01.expected | 8 - .../0.6.0-rc2/test/data/test_comment_01.json | 8 - .../test/data/test_complex_01.expected | 20 - .../0.6.0-rc2/test/data/test_complex_01.json | 17 - .../test/data/test_integer_01.expected | 1 - .../0.6.0-rc2/test/data/test_integer_01.json | 2 - .../test/data/test_integer_02.expected | 1 - .../0.6.0-rc2/test/data/test_integer_02.json | 2 - .../test/data/test_integer_03.expected | 1 - .../0.6.0-rc2/test/data/test_integer_03.json | 2 - .../test/data/test_integer_04.expected | 2 - .../0.6.0-rc2/test/data/test_integer_04.json | 3 - .../test/data/test_integer_05.expected | 2 - .../0.6.0-rc2/test/data/test_integer_05.json | 2 - .../test/data/test_integer_06_64bits.expected | 1 - .../test/data/test_integer_06_64bits.json | 2 - .../test/data/test_integer_07_64bits.expected | 1 - .../test/data/test_integer_07_64bits.json | 2 - .../test/data/test_integer_08_64bits.expected | 1 - .../test/data/test_integer_08_64bits.json | 2 - .../test/data/test_large_01.expected | 2122 ----------------- .../0.6.0-rc2/test/data/test_large_01.json | 2 - .../test/data/test_object_01.expected | 1 - .../0.6.0-rc2/test/data/test_object_01.json | 1 - .../test/data/test_object_02.expected | 2 - .../0.6.0-rc2/test/data/test_object_02.json | 1 - .../test/data/test_object_03.expected | 4 - .../0.6.0-rc2/test/data/test_object_03.json | 5 - .../test/data/test_object_04.expected | 2 - .../0.6.0-rc2/test/data/test_object_04.json | 3 - .../data/test_preserve_comment_01.expected | 3 - .../test/data/test_preserve_comment_01.json | 14 - .../0.6.0-rc2/test/data/test_real_01.expected | 2 - .../0.6.0-rc2/test/data/test_real_01.json | 3 - .../0.6.0-rc2/test/data/test_real_02.expected | 2 - .../0.6.0-rc2/test/data/test_real_02.json | 3 - .../0.6.0-rc2/test/data/test_real_03.expected | 2 - .../0.6.0-rc2/test/data/test_real_03.json | 3 - .../0.6.0-rc2/test/data/test_real_04.expected | 2 - .../0.6.0-rc2/test/data/test_real_04.json | 3 - .../0.6.0-rc2/test/data/test_real_05.expected | 3 - .../0.6.0-rc2/test/data/test_real_05.json | 3 - .../0.6.0-rc2/test/data/test_real_06.expected | 3 - .../0.6.0-rc2/test/data/test_real_06.json | 3 - .../0.6.0-rc2/test/data/test_real_07.expected | 3 - .../0.6.0-rc2/test/data/test_real_07.json | 3 - .../test/data/test_string_01.expected | 1 - .../0.6.0-rc2/test/data/test_string_01.json | 1 - .../test/data/test_string_02.expected | 1 - .../0.6.0-rc2/test/data/test_string_02.json | 1 - .../test/data/test_string_03.expected | 1 
- .../0.6.0-rc2/test/data/test_string_03.json | 1 - .../test/data/test_string_unicode_01.expected | 1 - .../test/data/test_string_unicode_01.json | 1 - .../test/data/test_string_unicode_02.expected | 1 - .../test/data/test_string_unicode_02.json | 1 - .../test/data/test_string_unicode_03.expected | 1 - .../test/data/test_string_unicode_03.json | 1 - .../test/data/test_string_unicode_04.expected | 1 - .../test/data/test_string_unicode_04.json | 1 - .../test/data/test_string_unicode_05.expected | 2 - .../test/data/test_string_unicode_05.json | 1 - .../0.6.0-rc2/test/generate_expected.py | 11 - .../0.6.0-rc2/test/jsonchecker/fail1.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail10.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail11.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail12.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail13.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail14.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail15.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail16.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail17.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail18.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail19.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail2.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail20.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail21.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail22.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail23.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail24.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail25.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail26.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail27.json | 2 - .../0.6.0-rc2/test/jsonchecker/fail28.json | 2 - .../0.6.0-rc2/test/jsonchecker/fail29.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail3.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail30.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail31.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail32.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail33.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail4.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail5.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail6.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail7.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail8.json | 1 - .../0.6.0-rc2/test/jsonchecker/fail9.json | 1 - .../0.6.0-rc2/test/jsonchecker/pass1.json | 58 - .../0.6.0-rc2/test/jsonchecker/pass2.json | 1 - .../0.6.0-rc2/test/jsonchecker/pass3.json | 6 - .../0.6.0-rc2/test/jsonchecker/readme.txt | 3 - .../0.6.0-rc2/test/pyjsontestrunner.py | 64 - tags/jsoncpp/0.6.0-rc2/test/runjsontests.py | 134 -- tags/jsoncpp/0.6.0-rc2/test/rununittests.py | 73 - tags/jsoncpp/0.6.0-rc2/version | 1 - 186 files changed, 15651 deletions(-) delete mode 100644 tags/jsoncpp/0.6.0-rc2/AUTHORS delete mode 100644 tags/jsoncpp/0.6.0-rc2/LICENSE delete mode 100644 tags/jsoncpp/0.6.0-rc2/NEWS.txt delete mode 100644 tags/jsoncpp/0.6.0-rc2/README.txt delete mode 100644 tags/jsoncpp/0.6.0-rc2/SConstruct delete mode 100644 tags/jsoncpp/0.6.0-rc2/amalgamate.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/__init__.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/antglob.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/tarball.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in delete mode 100644 tags/jsoncpp/0.6.0-rc2/doc/footer.html delete mode 100644 tags/jsoncpp/0.6.0-rc2/doc/header.html delete mode 100644 tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox delete mode 100644 tags/jsoncpp/0.6.0-rc2/doc/readme.txt delete 
mode 100644 tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox delete mode 100644 tags/jsoncpp/0.6.0-rc2/doxybuild.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/autolink.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/config.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/features.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/forwards.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/json.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/reader.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/value.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/writer.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln delete mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj delete mode 100644 tags/jsoncpp/0.6.0-rc2/makerelease.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp delete mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/cleantests.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected delete 
mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json delete mode 100644 
tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/generate_expected.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json delete mode 100644 
tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/runjsontests.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/test/rununittests.py delete mode 100644 tags/jsoncpp/0.6.0-rc2/version diff --git a/tags/jsoncpp/0.6.0-rc2/AUTHORS b/tags/jsoncpp/0.6.0-rc2/AUTHORS deleted file mode 100644 index c0fbbee..0000000 --- a/tags/jsoncpp/0.6.0-rc2/AUTHORS +++ /dev/null @@ -1 +0,0 @@ -Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc2/LICENSE b/tags/jsoncpp/0.6.0-rc2/LICENSE deleted file mode 100644 index ca2bfe1..0000000 --- a/tags/jsoncpp/0.6.0-rc2/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -The JsonCpp library's source code, including accompanying documentation, -tests and demonstration applications, are licensed under the following -conditions... - -The author (Baptiste Lepilleur) explicitly disclaims copyright in all -jurisdictions which recognize such a disclaimer. In such jurisdictions, -this software is released into the Public Domain. - -In jurisdictions which do not recognize Public Domain property (e.g. Germany as of -2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is -released under the terms of the MIT License (see below). - -In jurisdictions which recognize Public Domain property, the user of this -software may choose to accept it either as 1) Public Domain, 2) under the -conditions of the MIT License (see below), or 3) under the terms of dual -Public Domain/MIT License conditions described here, as they choose. 
- -The MIT License is about as close to Public Domain as a license can get, and is -described in clear, concise terms at: - - http://en.wikipedia.org/wiki/MIT_License - -The full text of the MIT License follows: - -======================================================================== -Copyright (c) 2007-2010 Baptiste Lepilleur - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, copy, -modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -======================================================================== -(END LICENSE TEXT) - -The MIT license is compatible with both the GPL and commercial -software, affording one all of the rights of Public Domain with the -minor nuisance of being required to keep the above copyright notice -and license text in the source code. Note also that by accepting the -Public Domain "license" you can re-license your copy using whatever -license you like. diff --git a/tags/jsoncpp/0.6.0-rc2/NEWS.txt b/tags/jsoncpp/0.6.0-rc2/NEWS.txt deleted file mode 100644 index e53b880..0000000 --- a/tags/jsoncpp/0.6.0-rc2/NEWS.txt +++ /dev/null @@ -1,101 +0,0 @@ - New in JsonCpp 0.6.0: - --------------------- - -* Compilation - - - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now - propagated to the build environment as this is required for some - compiler installation. - - - Added support for Microsoft Visual Studio 2008 (bug #2930462): - The platform "msvc90" has been added. - - Notes: you need to setup the environment by running vcvars32.bat - (e.g. MSVC 2008 command prompt in start menu) before running scons. - - - Added support for amalgamated source and header generation (a la sqlite). - Refer to README.txt section "Generating amalgamated source and header" - for detail. - -* Value - - - Removed experimental ValueAllocator, it caused static - initialization/destruction order issues (bug #2934500). - The DefaultValueAllocator has been inlined in code. - - - Added support for 64 bits integer: - - Types Json::Int64 and Json::UInt64 have been added. They are aliased - to 64 bits integers on system that support them (based on __int64 on - Microsoft Visual Studio platform, and long long on other platforms). - - Types Json::LargestInt and Json::LargestUInt have been added. They are - aliased to the largest integer type supported: - either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. - - Json::Value::asInt() and Json::Value::asUInt() still returns plain - "int" based types, but asserts if an attempt is made to retrieve - a 64 bits value that can not represented as the return type. 
- - Json::Value::asInt64() and Json::Value::asUInt64() have been added - to obtain the 64 bits integer value. - - Json::Value::asLargestInt() and Json::Value::asLargestUInt() returns - the integer as a LargestInt/LargestUInt respectively. Those functions - functions are typically used when implementing writer. - - The reader attempts to read number as 64 bits integer, and fall back - to reading a double if the number is not in the range of 64 bits - integer. - - Warning: Json::Value::asInt() and Json::Value::asUInt() now returns - long long. This changes break code that was passing the return value - to *printf() function. - - Support for 64 bits integer can be disabled by defining the macro - JSON_NO_INT64 (uncomment it in json/config.h for example), though - it should have no impact on existing usage. - - - The type Json::ArrayIndex is used for indexes of a JSON value array. It - is an unsigned int (typically 32 bits). - - - Array index can be passed as int to operator[], allowing use of literal: - Json::Value array; - array.append( 1234 ); - int value = array[0].asInt(); // did not compile previously - - - Added float Json::Value::asFloat() to obtain a floating point value as a - float (avoid lost of precision warning caused by used of asDouble() - to initialize a float). - -* Reader - - - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. - Bug #3023708 (Formatted has 2 't'). The old member function is deprecated - but still present for backward compatibility. - -* Tests - - - Added test to ensure that the escape sequence "\/" is corrected handled - by the parser. - -* Bug fixes - - - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now - correctly detected. - - - Bug #3139678: stack buffer overflow when parsing a double with a - length of 32 characters. - - - Fixed Value::operator <= implementation (had the semantic of operator >=). - Found when addigin unit tests for comparison operators. - - - Value::compare() is now const and has an actual implementation with - unit tests. - -* License - - - See file LICENSE for details. Basically JsonCpp is now licensed under - MIT license, or public domain if desired and recognized in your jurisdiction. - Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/]) who - helped figuring out the solution to the public domain issue. diff --git a/tags/jsoncpp/0.6.0-rc2/README.txt b/tags/jsoncpp/0.6.0-rc2/README.txt deleted file mode 100644 index 51a098a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/README.txt +++ /dev/null @@ -1,172 +0,0 @@ -* Introduction: - ============= - -JSON (JavaScript Object Notation) is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of -value, and a collection of name/value pairs. - -JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate -JSON value, handle serialization and unserialization to string. - -It can also preserve existing comment in unserialization/serialization steps, -making it a convenient format to store user input files. - -Unserialization parsing is user friendly and provides precise error reports. - - -* Building/Testing: - ================= - -JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires -python to be installed (http://www.python.org). - -You download scons-local distribution from the following url: -http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ - -Unzip it in the directory where you found this README file. 
scons.py Should be -at the same level as README. - -python scons.py platform=PLTFRM [TARGET] -where PLTFRM may be one of: - suncc Sun C++ (Solaris) - vacpp Visual Age C++ (AIX) - mingw - msvc6 Microsoft Visual Studio 6 service pack 5-6 - msvc70 Microsoft Visual Studio 2002 - msvc71 Microsoft Visual Studio 2003 - msvc80 Microsoft Visual Studio 2005 - msvc90 Microsoft Visual Studio 2008 - linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) - -Notes: if you are building with Microsoft Visual Studio 2008, you need to -setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) -before running scons. - -Adding platform is fairly simple. You need to change the Sconstruct file -to do so. - -and TARGET may be: - check: build library and run unit tests. - - -* Running the test manually: - ========================== - -Notes that test can be run by scons using the 'check' target (see above). - -You need to run test manually only if you are troubleshooting an issue. - -In the instruction below, replace "path to jsontest.exe" with the path -of the 'jsontest' executable that was compiled on your platform. - -cd test -# This will run the Reader/Writer tests -python runjsontests.py "path to jsontest.exe" - -# This will run the Reader/Writer tests, using JSONChecker test suite -# (http://www.json.org/JSON_checker/). -# Notes: not all tests pass: JsonCpp is too lenient (for example, -# it allows an integer to start with '0'). The goal is to improve -# strict mode parsing to get all tests to pass. -python runjsontests.py --with-json-checker "path to jsontest.exe" - -# This will run the unit tests (mostly Value) -python rununittests.py "path to test_lib_json.exe" - -You can run the tests using valgrind: -python rununittests.py --valgrind "path to test_lib_json.exe" - - -* Building the documentation: - =========================== - -Run the python script doxybuild.py from the top directory: - -python doxybuild.py --open --with-dot - -See doxybuild.py --help for options. - -Notes that the documentation is also available for download as a tarball. -The documentation of the latest release is available online at: -http://jsoncpp.sourceforge.net/ - -* Generating amalgamated source and header - ======================================== - -JsonCpp is provided with a script to generate a single header and a single -source file to ease inclusion in an existing project. - -The amalgamated source can be generated at any time by running the following -command from the top-directory (requires python 2.6): - -python amalgamate.py - -It is possible to specify header name. See -h options for detail. By default, -the following files are generated: -- dist/jsoncpp.cpp: source file that need to be added to your project -- dist/json/json.h: header file corresponding to use in your project. It is -equivalent to including json/json.h in non-amalgamated source. This header -only depends on standard headers. -- dist/json/json-forwards.h: header the provides forward declaration -of all JsonCpp types. This typically what should be included in headers to -speed-up compilation. - -The amalgamated sources are generated by concatenating JsonCpp source in the -correct order and defining macro JSON_IS_AMALGAMATION to prevent inclusion -of other headers. - -* Using json-cpp in your project: - =============================== - -include/ should be added to your compiler include path. 
jsoncpp headers -should be included as follow: - -#include - - -* Adding a reader/writer test: - ============================ - -To add a test, you need to create two files in test/data: -- a TESTNAME.json file, that contains the input document in JSON format. -- a TESTNAME.expected file, that contains a flatened representation of - the input document. - -TESTNAME.expected file format: -- each line represents a JSON element of the element tree represented - by the input document. -- each line has two parts: the path to access the element separated from - the element value by '='. Array and object values are always empty - (e.g. represented by either [] or {}). -- element path: '.' represented the root element, and is used to separate - object members. [N] is used to specify the value of an array element - at index N. -See test_complex_01.json and test_complex_01.expected to better understand -element path. - - -* Understanding reader/writer test output: - ======================================== - -When a test is run, output files are generated aside the input test files. -Below is a short description of the content of each file: - -- test_complex_01.json: input JSON document -- test_complex_01.expected: flattened JSON element tree used to check if - parsing was corrected. - -- test_complex_01.actual: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.json -- test_complex_01.rewrite: JSON document written by jsontest.exe using the - Json::Value parsed from test_complex_01.json and serialized using - Json::StyledWritter. -- test_complex_01.actual-rewrite: flattened JSON element tree produced by - jsontest.exe from reading test_complex_01.rewrite. -test_complex_01.process-output: jsontest.exe output, typically useful to - understand parsing error. - -* License - ======= - -See file LICENSE for details. Basically JsonCpp is licensed under -MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/0.6.0-rc2/SConstruct b/tags/jsoncpp/0.6.0-rc2/SConstruct deleted file mode 100644 index 23225cb..0000000 --- a/tags/jsoncpp/0.6.0-rc2/SConstruct +++ /dev/null @@ -1,248 +0,0 @@ -""" -Notes: -- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. - -To add a platform: -- add its name in options allowed_values below -- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. -""" - -import os -import os.path -import sys - -JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() -DIST_DIR = '#dist' - -options = Variables() -options.Add( EnumVariable('platform', - 'Platform (compiler/stl) used to build the project', - 'msvc71', - allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), - ignorecase=2) ) - -try: - platform = ARGUMENTS['platform'] - if platform == 'linux-gcc': - CXX = 'g++' # not quite right, but env is not yet available. 
- import commands - version = commands.getoutput('%s -dumpversion' %CXX) - platform = 'linux-gcc-%s' %version - print "Using platform '%s'" %platform - LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') - LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) - os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH - print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH -except KeyError: - print 'You must specify a "platform"' - sys.exit(2) - -print "Building using PLATFORM =", platform - -rootbuild_dir = Dir('#buildscons') -build_dir = os.path.join( '#buildscons', platform ) -bin_dir = os.path.join( '#bin', platform ) -lib_dir = os.path.join( '#libs', platform ) -sconsign_dir_path = Dir(build_dir).abspath -sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) - -# Ensure build directory exist (SConsignFile fail otherwise!) -if not os.path.exists( sconsign_dir_path ): - os.makedirs( sconsign_dir_path ) - -# Store all dependencies signature in a database -SConsignFile( sconsign_path ) - -def make_environ_vars(): - """Returns a dictionnary with environment variable to use when compiling.""" - # PATH is required to find the compiler - # TEMP is required for at least mingw - # LD_LIBRARY_PATH & co is required on some system for the compiler - vars = {} - for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): - if name in os.environ: - vars[name] = os.environ[name] - return vars - - -env = Environment( ENV = make_environ_vars(), - toolpath = ['scons-tools'], - tools=[] ) #, tools=['default'] ) - -if platform == 'suncc': - env.Tool( 'sunc++' ) - env.Tool( 'sunlink' ) - env.Tool( 'sunar' ) - env.Append( CCFLAGS = ['-mt'] ) -elif platform == 'vacpp': - env.Tool( 'default' ) - env.Tool( 'aixcc' ) - env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! - # using xlC_r ensure multi-threading is enabled: - # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm - env.Append( CCFLAGS = '-qrtti=all', - LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning -elif platform == 'msvc6': - env['MSVS_VERSION']='6.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc70': - env['MSVS_VERSION']='7.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc71': - env['MSVS_VERSION']='7.1' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -GX /nologo /MT' -elif platform == 'msvc80': - env['MSVS_VERSION']='8.0' - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'msvc90': - env['MSVS_VERSION']='9.0' - # Scons 1.2 fails to detect the correct location of the platform SDK. - # So we propagate those from the environment. This requires that the - # user run vcvars32.bat before compiling. 
- if 'INCLUDE' in os.environ: - env['ENV']['INCLUDE'] = os.environ['INCLUDE'] - if 'LIB' in os.environ: - env['ENV']['LIB'] = os.environ['LIB'] - for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: - env.Tool( tool ) - env['CXXFLAGS']='-GR -EHsc /nologo /MT' -elif platform == 'mingw': - env.Tool( 'mingw' ) - env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) -elif platform.startswith('linux-gcc'): - env.Tool( 'default' ) - env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) - env['SHARED_LIB_ENABLED'] = True -else: - print "UNSUPPORTED PLATFORM." - env.Exit(1) - -env.Tool('targz') -env.Tool('srcdist') -env.Tool('globtool') - -env.Append( CPPPATH = ['#include'], - LIBPATH = lib_dir ) -short_platform = platform -if short_platform.startswith('msvc'): - short_platform = short_platform[2:] -# Notes: on Windows you need to rebuild the source for each variant -# Build script does not support that yet so we only build static libraries. -# This also fails on AIX because both dynamic and static library ends with -# extension .a. -env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) -env['LIB_PLATFORM'] = short_platform -env['LIB_LINK_TYPE'] = 'lib' # static -env['LIB_CRUNTIME'] = 'mt' -env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention -env['JSONCPP_VERSION'] = JSONCPP_VERSION -env['BUILD_DIR'] = env.Dir(build_dir) -env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) -env['DIST_DIR'] = DIST_DIR -if 'TarGz' in env['BUILDERS']: - class SrcDistAdder: - def __init__( self, env ): - self.env = env - def __call__( self, *args, **kw ): - apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) - env['SRCDIST_BUILDER'] = env.TarGz -else: # If tarfile module is missing - class SrcDistAdder: - def __init__( self, env ): - pass - def __call__( self, *args, **kw ): - pass -env['SRCDIST_ADD'] = SrcDistAdder( env ) -env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) - -env_testing = env.Clone( ) -env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) - -def buildJSONExample( env, target_sources, target_name ): - env = env.Clone() - env.Append( CPPPATH = ['#'] ) - exe = env.Program( target=target_name, - source=target_sources ) - env['SRCDIST_ADD']( source=[target_sources] ) - global bin_dir - return env.Install( bin_dir, exe ) - -def buildJSONTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildUnitTests( env, target_sources, target_name ): - jsontests_node = buildJSONExample( env, target_sources, target_name ) - check_alias_target = env.Alias( 'check', jsontests_node, - RunUnitTests( jsontests_node, jsontests_node ) ) - env.AlwaysBuild( check_alias_target ) - -def buildLibrary( env, target_sources, target_name ): - static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - global lib_dir - env.Install( lib_dir, static_lib ) - if env['SHARED_LIB_ENABLED']: - shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', - source=target_sources ) - env.Install( lib_dir, shared_lib ) - env['SRCDIST_ADD']( source=[target_sources] ) - -Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) - -def buildProjectInDirectory( target_directory ): - global build_dir - 
target_build_dir = os.path.join( build_dir, target_directory ) - target = os.path.join( target_directory, 'sconscript' ) - SConscript( target, build_dir=target_build_dir, duplicate=0 ) - env['SRCDIST_ADD']( source=[target] ) - - -def runJSONTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - data_path = os.path.join( jsontest_path, 'data' ) - import runjsontests - return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) - -def runJSONTests_string( target, source = None, env = None ): - return 'RunJSONTests("%s")' % source[0] - -import SCons.Action -ActionFactory = SCons.Action.ActionFactory -RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) - -def runUnitTests_action( target, source = None, env = None ): - # Add test scripts to python path - jsontest_path = Dir( '#test' ).abspath - sys.path.insert( 0, jsontest_path ) - import rununittests - return rununittests.runAllTests( os.path.abspath(source[0].path) ) - -def runUnitTests_string( target, source = None, env = None ): - return 'RunUnitTests("%s")' % source[0] - -RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) - -env.Alias( 'check' ) - -srcdist_cmd = env['SRCDIST_ADD']( source = """ - AUTHORS README.txt SConstruct - """.split() ) -env.Alias( 'src-dist', srcdist_cmd ) - -buildProjectInDirectory( 'src/jsontestrunner' ) -buildProjectInDirectory( 'src/lib_json' ) -buildProjectInDirectory( 'src/test_lib_json' ) -#print env.Dump() - diff --git a/tags/jsoncpp/0.6.0-rc2/amalgamate.py b/tags/jsoncpp/0.6.0-rc2/amalgamate.py deleted file mode 100644 index 1476a5f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/amalgamate.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Amalgate json-cpp library sources into a single source and header file. - -Requires Python 2.6 - -Example of invocation (must be invoked from json-cpp top directory): -python amalgate.py -""" -import os -import os.path -import sys - -class AmalgamationFile: - def __init__( self, top_dir ): - self.top_dir = top_dir - self.blocks = [] - - def add_text( self, text ): - if not text.endswith( '\n' ): - text += '\n' - self.blocks.append( text ) - - def add_file( self, relative_input_path, wrap_in_comment=False ): - def add_marker( prefix ): - self.add_text( '' ) - self.add_text( '// ' + '/'*70 ) - self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) ) - self.add_text( '// ' + '/'*70 ) - self.add_text( '' ) - add_marker( 'Beginning' ) - f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) - content = f.read() - if wrap_in_comment: - content = '/*\n' + content + '\n*/' - self.add_text( content ) - f.close() - add_marker( 'End' ) - self.add_text( '\n\n\n\n' ) - - def get_value( self ): - return ''.join( self.blocks ).replace('\r\n','\n') - - def write_to( self, output_path ): - output_dir = os.path.dirname( output_path ) - if output_dir and not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - f = open( output_path, 'wb' ) - f.write( self.get_value() ) - f.close() - -def amalgamate_source( source_top_dir=None, - target_source_path=None, - header_include_path=None ): - """Produces amalgated source. - Parameters: - source_top_dir: top-directory - target_source_path: output .cpp path - header_include_path: generated header path relative to target_source_path. - """ - print 'Amalgating header...' 
- header = AmalgamationFile( source_top_dir ) - header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' ) - header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) - header.add_file( 'LICENSE', wrap_in_comment=True ) - header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' ) - header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) - header.add_text( '/// If defined, indicates that the source file is amalgated' ) - header.add_text( '/// to prevent private header inclusion.' ) - header.add_text( '#define JSON_IS_AMALGATED' ) - header.add_file( 'include/json/config.h' ) - header.add_file( 'include/json/forwards.h' ) - header.add_file( 'include/json/features.h' ) - header.add_file( 'include/json/value.h' ) - header.add_file( 'include/json/reader.h' ) - header.add_file( 'include/json/writer.h' ) - header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) - - target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) - print 'Writing amalgated header to %r' % target_header_path - header.write_to( target_header_path ) - - base, ext = os.path.splitext( header_include_path ) - forward_header_include_path = base + '-forwards' + ext - print 'Amalgating forward header...' - header = AmalgamationFile( source_top_dir ) - header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) - header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) - header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) - header.add_file( 'LICENSE', wrap_in_comment=True ) - header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) - header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) - header.add_text( '/// If defined, indicates that the source file is amalgated' ) - header.add_text( '/// to prevent private header inclusion.' ) - header.add_text( '#define JSON_IS_AMALGATED' ) - header.add_file( 'include/json/config.h' ) - header.add_file( 'include/json/forwards.h' ) - header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) - - target_forward_header_path = os.path.join( os.path.dirname(target_source_path), - forward_header_include_path ) - print 'Writing amalgated forward header to %r' % target_forward_header_path - header.write_to( target_forward_header_path ) - - print 'Amalgating source...' - source = AmalgamationFile( source_top_dir ) - source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' ) - source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) - source.add_file( 'LICENSE', wrap_in_comment=True ) - source.add_text( '' ) - source.add_text( '#include <%s>' % header_include_path ) - source.add_text( '' ) - source.add_file( 'src/lib_json\json_tool.h' ) - source.add_file( 'src/lib_json\json_reader.cpp' ) - source.add_file( 'src/lib_json\json_batchallocator.h' ) - source.add_file( 'src/lib_json\json_valueiterator.inl' ) - source.add_file( 'src/lib_json\json_value.cpp' ) - source.add_file( 'src/lib_json\json_writer.cpp' ) - - print 'Writing amalgated source to %r' % target_source_path - source.write_to( target_source_path ) - -def main(): - usage = """%prog [options] -Generate a single amalgated source and header file from the sources. 
-""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp', - help="""Output .cpp source path. [Default: %default]""") - parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h', - help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") - parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(), - help="""Source top-directory. [Default: %default]""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - msg = amalgamate_source( source_top_dir=options.top_dir, - target_source_path=options.target_source_path, - header_include_path=options.header_include_path ) - if msg: - sys.stderr.write( msg + '\n' ) - sys.exit( 1 ) - else: - print 'Source succesfully amalagated' - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py deleted file mode 100644 index c944e7c..0000000 --- a/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py deleted file mode 100644 index bbb6fec..0000000 --- a/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py +++ /dev/null @@ -1,201 +0,0 @@ -#!/usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -from dircache import listdir -import re -import fnmatch -import os.path - - -# These fnmatch expressions are used by default to prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' - -# These fnmatch expressions are used by default to exclude files and dirs -# while doing the recursive traversal in the glob_impl method of glob function. -##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() - -# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree -# while doing the recursive traversal in the glob_impl method of glob function. -default_excludes = ''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/_darcs -**/_darcs/** -**/.DS_Store ''' - -DIR = 1 -FILE = 2 -DIR_LINK = 4 -FILE_LINK = 8 -LINKS = DIR_LINK | FILE_LINK -ALL_NO_LINK = DIR | FILE -ALL = DIR | FILE | LINKS - -_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) - -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' - """ - rex = ['^'] - next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos - if match.start(0) != next_pos: - raise ValueError( "Invalid ant pattern" ) - if match.group(1): # /**/ - rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) - elif match.group(2): # **/ - rex.append( '(?:.*%s)?' 
% sep_rex ) - elif match.group(3): # /** - rex.append( sep_rex + '.*' ) - elif match.group(4): # * - rex.append( '[^/%s]*' % re.escape(os.path.sep) ) - elif match.group(5): # / - rex.append( sep_rex ) - else: # somepath - rex.append( re.escape(match.group(6)) ) - next_pos = match.end() - rex.append('$') - return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l - -def glob(dir_path, - includes = '**/*', - excludes = default_excludes, - entry_type = FILE, - prune_dirs = prune_dirs, - max_depth = 25): - include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] - dir_path = dir_path.replace('/',os.path.sep) - entry_type_filter = entry_type - - def is_pruned_dir( dir_name ): - for pattern in prune_dirs: - if fnmatch.fnmatch( dir_name, pattern ): - return True - return False - - def apply_filter( full_path, filter_rexs ): - """Return True if at least one of the filter regular expression match full_path.""" - for rex in filter_rexs: - if rex.match( full_path ): - return True - return False - - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: - dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' - return list( glob_impl( dir_path ) ) - - -if __name__ == "__main__": - import unittest - - class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) - - unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py deleted file mode 100644 index 5d8372d..0000000 --- a/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py +++ /dev/null @@ -1,63 +0,0 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript 
**/wscript_build', -## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py deleted file mode 100644 index 03e0467..0000000 --- a/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Updates the license text in source file. -""" - -# An existing license is found if the file starts with the string below, -# and ends with the first blank line. -LICENSE_BEGIN = "// Copyright " - -BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -""".replace('\r\n','\n') - -def update_license( path, dry_run, show_diff ): - """Update the license statement in the specified file. - Parameters: - path: path of the C++ source file to update. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - with open( path, 'rt' ) as fin: - original_text = fin.read().replace('\r\n','\n') - newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith( LICENSE_BEGIN ): - # No existing license found => prepend it - new_text = BRIEF_LICENSE + original_text - else: - license_end_index = original_text.index( '\n\n' ) # search first blank line - new_text = BRIEF_LICENSE + original_text[license_end_index+2:] - if original_text != new_text: - if not dry_run: - with open( path, 'wb' ) as fout: - fout.write( new_text.replace('\n', newline ) ) - print 'Updated', path - if show_diff: - import difflib - print '\n'.join( difflib.unified_diff( original_text.split('\n'), - new_text.split('\n') ) ) - return True - return False - -def update_license_in_source_directories( source_dirs, dry_run, show_diff ): - """Updates license text in C++ source files found in directory source_dirs. - Parameters: - source_dirs: list of directory to scan for C++ sources. Directories are - scanned recursively. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - from devtools import antglob - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - for source_dir in source_dirs: - cpp_sources = antglob.glob( source_dir, - includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs ) - for source in cpp_sources: - update_license( source, dry_run, show_diff ) - -def main(): - usage = """%prog DIR [DIR2...] 
-Updates license text in sources of the project in source files found -in the directory specified on the command-line. - -Example of call: -python devtools\licenseupdater.py include src -n --diff -=> Show change that would be made to the sources. - -python devtools\licenseupdater.py include src -=> Update license statement on all sources in directories include/ and src/. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, - help="""Only show what files are updated, do not update the files""") - parser.add_option('--diff', dest="show_diff", action='store_true', default=False, - help="""On update, show change made to the file.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - update_license_in_source_directories( args, options.dry_run, options.show_diff ) - print 'Done' - -if __name__ == '__main__': - import sys - import os.path - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - main() - diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py deleted file mode 100644 index 182602e..0000000 --- a/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py +++ /dev/null @@ -1,53 +0,0 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! 
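As the comment above notes, TarFile.gzopen is not part of tarfile's documented API; the documented spelling of the same operation is tarfile.open with an explicit 'r:gz' mode. A minimal sketch of the equivalent decompression (the function name is illustrative, not part of the original script):

    import tarfile

    def decompress_sketch(tarball_path, base_dir):
        # Open the gzip-compressed tarball read-only and extract everything
        # into base_dir, as decompress() below does via TarFile.gzopen.
        with tarfile.open(tarball_path, 'r:gz') as tar:
            tar.extractall(base_dir)
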
- tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() diff --git a/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in deleted file mode 100644 index 48861d2..0000000 --- a/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in +++ /dev/null @@ -1,1534 +0,0 @@ -# Doxyfile 1.5.9 - -# This file describes the settings to be used by the documentation system -# doxygen (www.doxygen.org) for a project -# -# All text after a hash (#) is considered a comment and will be ignored -# The format is: -# TAG = value [value, ...] -# For lists items can also be appended using: -# TAG += value [value, ...] -# Values that contain spaces should be placed between quotes (" ") - -#--------------------------------------------------------------------------- -# Project related configuration options -#--------------------------------------------------------------------------- - -# This tag specifies the encoding used for all characters in the config file -# that follow. The default is UTF-8 which is also the encoding used for all -# text before the first occurrence of this tag. Doxygen uses libiconv (or the -# iconv built into libc) for the transcoding. See -# http://www.gnu.org/software/libiconv for the list of possible encodings. - -DOXYFILE_ENCODING = UTF-8 - -# The PROJECT_NAME tag is a single word (or a sequence of words surrounded -# by quotes) that should identify the project. - -PROJECT_NAME = "JsonCpp" - -# The PROJECT_NUMBER tag can be used to enter a project or revision number. -# This could be handy for archiving the generated documentation or -# if some version control system is used. - -PROJECT_NUMBER = %JSONCPP_VERSION% - -# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) -# base path where the generated documentation will be put. -# If a relative path is entered, it will be relative to the location -# where doxygen was started. If left blank the current directory will be used. - -OUTPUT_DIRECTORY = %DOC_TOPDIR% - -# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create -# 4096 sub-directories (in 2 levels) under the output directory of each output -# format and will distribute the generated files over these directories. -# Enabling this option can be useful when feeding doxygen a huge amount of -# source files, where putting all generated files in the same directory would -# otherwise cause performance problems for the file system. - -CREATE_SUBDIRS = NO - -# The OUTPUT_LANGUAGE tag is used to specify the language in which all -# documentation generated by doxygen is written. Doxygen will use this -# information to generate all constant output in the proper language. -# The default language is English, other supported languages are: -# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, -# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, -# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English -# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, -# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, -# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. - -OUTPUT_LANGUAGE = English - -# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will -# include brief member descriptions after the members that are listed in -# the file and class documentation (similar to JavaDoc). -# Set to NO to disable this. 
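Note that this doxyfile.in is a template rather than a ready-to-use Doxygen configuration: the %NAME% tokens (%JSONCPP_VERSION% and %DOC_TOPDIR% above, and later %TOPDIR%, %HTML_OUTPUT%, %WARNING_LOG_PATH%, %HTML_HELP%) are placeholders that a build script presumably expands before invoking doxygen. A minimal sketch of that substitution step, with the dictionary values purely illustrative:

    import re

    def expand_doxyfile_template(template_text, parameters):
        # Replace every %NAME% token with the matching entry from `parameters`,
        # leaving unknown tokens untouched.
        def substitute(match):
            return parameters.get(match.group(1), match.group(0))
        return re.sub(r'%(\w+)%', substitute, template_text)

    # Illustrative usage (values are made up; 0.6.0-rc2 matches the tag being removed):
    # text = open('doc/doxyfile.in').read()
    # doxyfile = expand_doxyfile_template(text, {'JSONCPP_VERSION': '0.6.0-rc2',
    #                                            'DOC_TOPDIR': 'dist/doxygen',
    #                                            'TOPDIR': '.',
    #                                            'HTML_OUTPUT': 'html',
    #                                            'WARNING_LOG_PATH': 'doxygen-warnings.log',
    #                                            'HTML_HELP': 'NO'})
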
- -BRIEF_MEMBER_DESC = YES - -# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend -# the brief description of a member or function before the detailed description. -# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the -# brief descriptions will be completely suppressed. - -REPEAT_BRIEF = YES - -# This tag implements a quasi-intelligent brief description abbreviator -# that is used to form the text in various listings. Each string -# in this list, if found as the leading text of the brief description, will be -# stripped from the text and the result after processing the whole list, is -# used as the annotated text. Otherwise, the brief description is used as-is. -# If left blank, the following values are used ("$name" is automatically -# replaced with the name of the entity): "The $name class" "The $name widget" -# "The $name file" "is" "provides" "specifies" "contains" -# "represents" "a" "an" "the" - -ABBREVIATE_BRIEF = "The $name class" \ - "The $name widget" \ - "The $name file" \ - is \ - provides \ - specifies \ - contains \ - represents \ - a \ - an \ - the - -# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then -# Doxygen will generate a detailed section even if there is only a brief -# description. - -ALWAYS_DETAILED_SEC = NO - -# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all -# inherited members of a class in the documentation of that class as if those -# members were ordinary class members. Constructors, destructors and assignment -# operators of the base classes will not be shown. - -INLINE_INHERITED_MEMB = NO - -# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full -# path before files name in the file list and in the header files. If set -# to NO the shortest path that makes the file name unique will be used. - -FULL_PATH_NAMES = YES - -# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag -# can be used to strip a user-defined part of the path. Stripping is -# only done if one of the specified strings matches the left-hand part of -# the path. The tag can be used to show relative paths in the file list. -# If left blank the directory from which doxygen is run is used as the -# path to strip. - -STRIP_FROM_PATH = %TOPDIR% - -# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of -# the path mentioned in the documentation of a class, which tells -# the reader which header file to include in order to use a class. -# If left blank only the name of the header file containing the class -# definition is used. Otherwise one should specify the include paths that -# are normally passed to the compiler using the -I flag. - -STRIP_FROM_INC_PATH = %TOPDIR%/include - -# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter -# (but less readable) file names. This can be useful is your file systems -# doesn't support long names like on DOS, Mac, or CD-ROM. - -SHORT_NAMES = NO - -# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen -# will interpret the first line (until the first dot) of a JavaDoc-style -# comment as the brief description. If set to NO, the JavaDoc -# comments will behave just like regular Qt-style comments -# (thus requiring an explicit @brief command for a brief description.) - -JAVADOC_AUTOBRIEF = YES - -# If the QT_AUTOBRIEF tag is set to YES then Doxygen will -# interpret the first line (until the first dot) of a Qt-style -# comment as the brief description. 
If set to NO, the comments -# will behave just like regular Qt-style comments (thus requiring -# an explicit \brief command for a brief description.) - -QT_AUTOBRIEF = NO - -# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen -# treat a multi-line C++ special comment block (i.e. a block of //! or /// -# comments) as a brief description. This used to be the default behaviour. -# The new default is to treat a multi-line C++ comment block as a detailed -# description. Set this tag to YES if you prefer the old behaviour instead. - -MULTILINE_CPP_IS_BRIEF = NO - -# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented -# member inherits the documentation from any documented member that it -# re-implements. - -INHERIT_DOCS = YES - -# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce -# a new page for each member. If set to NO, the documentation of a member will -# be part of the file/class/namespace that contains it. - -SEPARATE_MEMBER_PAGES = NO - -# The TAB_SIZE tag can be used to set the number of spaces in a tab. -# Doxygen uses this value to replace tabs by spaces in code fragments. - -TAB_SIZE = 3 - -# This tag can be used to specify a number of aliases that acts -# as commands in the documentation. An alias has the form "name=value". -# For example adding "sideeffect=\par Side Effects:\n" will allow you to -# put the command \sideeffect (or @sideeffect) in the documentation, which -# will result in a user-defined paragraph with heading "Side Effects:". -# You can put \n's in the value part of an alias to insert newlines. - -ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ - "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ - "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ - "json_ref=JSON (JavaScript Object Notation)" - -# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C -# sources only. Doxygen will then generate output that is more tailored for C. -# For instance, some of the names that are used will be different. The list -# of all members will be omitted, etc. - -OPTIMIZE_OUTPUT_FOR_C = NO - -# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java -# sources only. Doxygen will then generate output that is more tailored for -# Java. For instance, namespaces will be presented as packages, qualified -# scopes will look different, etc. - -OPTIMIZE_OUTPUT_JAVA = NO - -# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran -# sources only. Doxygen will then generate output that is more tailored for -# Fortran. - -OPTIMIZE_FOR_FORTRAN = NO - -# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL -# sources. Doxygen will then generate output that is tailored for -# VHDL. - -OPTIMIZE_OUTPUT_VHDL = NO - -# Doxygen selects the parser to use depending on the extension of the files it parses. -# With this tag you can assign which parser to use for a given extension. -# Doxygen has a built-in mapping, but you can override or extend it using this tag. -# The format is ext=language, where ext is a file extension, and language is one of -# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, -# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat -# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), -# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. - -EXTENSION_MAPPING = - -# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want -# to include (a tag file for) the STL sources as input, then you should -# set this tag to YES in order to let doxygen match functions declarations and -# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. -# func(std::string) {}). This also make the inheritance and collaboration -# diagrams that involve STL classes more complete and accurate. - -BUILTIN_STL_SUPPORT = YES - -# If you use Microsoft's C++/CLI language, you should set this option to YES to -# enable parsing support. - -CPP_CLI_SUPPORT = NO - -# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. -# Doxygen will parse them like normal C++ but will assume all classes use public -# instead of private inheritance when no explicit protection keyword is present. - -SIP_SUPPORT = NO - -# For Microsoft's IDL there are propget and propput attributes to indicate getter -# and setter methods for a property. Setting this option to YES (the default) -# will make doxygen to replace the get and set methods by a property in the -# documentation. This will only work if the methods are indeed getting or -# setting a simple type. If this is not the case, or you want to show the -# methods anyway, you should set this option to NO. - -IDL_PROPERTY_SUPPORT = YES - -# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC -# tag is set to YES, then doxygen will reuse the documentation of the first -# member in the group (if any) for the other members of the group. By default -# all members of a group must be documented explicitly. - -DISTRIBUTE_GROUP_DOC = NO - -# Set the SUBGROUPING tag to YES (the default) to allow class member groups of -# the same type (for instance a group of public functions) to be put as a -# subgroup of that type (e.g. under the Public Functions section). Set it to -# NO to prevent subgrouping. Alternatively, this can be done per class using -# the \nosubgrouping command. - -SUBGROUPING = YES - -# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum -# is documented as struct, union, or enum with the name of the typedef. So -# typedef struct TypeS {} TypeT, will appear in the documentation as a struct -# with name TypeT. When disabled the typedef will appear as a member of a file, -# namespace, or class. And the struct will be named TypeS. This can typically -# be useful for C code in case the coding convention dictates that all compound -# types are typedef'ed and only the typedef is referenced, never the tag name. - -TYPEDEF_HIDES_STRUCT = NO - -# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to -# determine which symbols to keep in memory and which to flush to disk. -# When the cache is full, less often used symbols will be written to disk. -# For small to medium size projects (<1000 input files) the default value is -# probably good enough. For larger projects a too small cache size can cause -# doxygen to be busy swapping symbols to and from disk most of the time -# causing a significant performance penality. -# If the system has enough physical memory increasing the cache will improve the -# performance by keeping more symbols in memory. Note that the value works on -# a logarithmic scale so increasing the size by one will rougly double the -# memory usage. 
The cache size is given by this formula: -# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, -# corresponding to a cache size of 2^16 = 65536 symbols - -SYMBOL_CACHE_SIZE = 0 - -#--------------------------------------------------------------------------- -# Build related configuration options -#--------------------------------------------------------------------------- - -# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in -# documentation are documented, even if no documentation was available. -# Private class members and static file members will be hidden unless -# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES - -EXTRACT_ALL = YES - -# If the EXTRACT_PRIVATE tag is set to YES all private members of a class -# will be included in the documentation. - -EXTRACT_PRIVATE = NO - -# If the EXTRACT_STATIC tag is set to YES all static members of a file -# will be included in the documentation. - -EXTRACT_STATIC = YES - -# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) -# defined locally in source files will be included in the documentation. -# If set to NO only classes defined in header files are included. - -EXTRACT_LOCAL_CLASSES = NO - -# This flag is only useful for Objective-C code. When set to YES local -# methods, which are defined in the implementation section but not in -# the interface are included in the documentation. -# If set to NO (the default) only methods in the interface are included. - -EXTRACT_LOCAL_METHODS = NO - -# If this flag is set to YES, the members of anonymous namespaces will be -# extracted and appear in the documentation as a namespace called -# 'anonymous_namespace{file}', where file will be replaced with the base -# name of the file that contains the anonymous namespace. By default -# anonymous namespace are hidden. - -EXTRACT_ANON_NSPACES = NO - -# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all -# undocumented members of documented classes, files or namespaces. -# If set to NO (the default) these members will be included in the -# various overviews, but no documentation section is generated. -# This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_MEMBERS = NO - -# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all -# undocumented classes that are normally visible in the class hierarchy. -# If set to NO (the default) these classes will be included in the various -# overviews. This option has no effect if EXTRACT_ALL is enabled. - -HIDE_UNDOC_CLASSES = NO - -# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all -# friend (class|struct|union) declarations. -# If set to NO (the default) these declarations will be included in the -# documentation. - -HIDE_FRIEND_COMPOUNDS = NO - -# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any -# documentation blocks found inside the body of a function. -# If set to NO (the default) these blocks will be appended to the -# function's detailed documentation block. - -HIDE_IN_BODY_DOCS = NO - -# The INTERNAL_DOCS tag determines if documentation -# that is typed after a \internal command is included. If the tag is set -# to NO (the default) then the documentation will be excluded. -# Set it to YES to include the internal documentation. - -INTERNAL_DOCS = YES - -# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate -# file names in lower-case letters. If set to YES upper-case letters are also -# allowed. 
This is useful if you have classes or files whose names only differ -# in case and if your file system supports case sensitive file names. Windows -# and Mac users are advised to set this option to NO. - -CASE_SENSE_NAMES = NO - -# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen -# will show members with their full class and namespace scopes in the -# documentation. If set to YES the scope will be hidden. - -HIDE_SCOPE_NAMES = NO - -# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen -# will put a list of the files that are included by a file in the documentation -# of that file. - -SHOW_INCLUDE_FILES = YES - -# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] -# is inserted in the documentation for inline members. - -INLINE_INFO = YES - -# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen -# will sort the (detailed) documentation of file and class members -# alphabetically by member name. If set to NO the members will appear in -# declaration order. - -SORT_MEMBER_DOCS = YES - -# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the -# brief documentation of file, namespace and class members alphabetically -# by member name. If set to NO (the default) the members will appear in -# declaration order. - -SORT_BRIEF_DOCS = NO - -# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the -# hierarchy of group names into alphabetical order. If set to NO (the default) -# the group names will appear in their defined order. - -SORT_GROUP_NAMES = NO - -# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be -# sorted by fully-qualified names, including namespaces. If set to -# NO (the default), the class list will be sorted only by class name, -# not including the namespace part. -# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. -# Note: This option applies only to the class list, not to the -# alphabetical list. - -SORT_BY_SCOPE_NAME = YES - -# The GENERATE_TODOLIST tag can be used to enable (YES) or -# disable (NO) the todo list. This list is created by putting \todo -# commands in the documentation. - -GENERATE_TODOLIST = YES - -# The GENERATE_TESTLIST tag can be used to enable (YES) or -# disable (NO) the test list. This list is created by putting \test -# commands in the documentation. - -GENERATE_TESTLIST = NO - -# The GENERATE_BUGLIST tag can be used to enable (YES) or -# disable (NO) the bug list. This list is created by putting \bug -# commands in the documentation. - -GENERATE_BUGLIST = NO - -# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or -# disable (NO) the deprecated list. This list is created by putting -# \deprecated commands in the documentation. - -GENERATE_DEPRECATEDLIST= YES - -# The ENABLED_SECTIONS tag can be used to enable conditional -# documentation sections, marked by \if sectionname ... \endif. - -ENABLED_SECTIONS = - -# The MAX_INITIALIZER_LINES tag determines the maximum number of lines -# the initial value of a variable or define consists of for it to appear in -# the documentation. If the initializer consists of more lines than specified -# here it will be hidden. Use a value of 0 to hide initializers completely. -# The appearance of the initializer of individual variables and defines in the -# documentation can be controlled using \showinitializer or \hideinitializer -# command in the documentation regardless of this setting. 
- -MAX_INITIALIZER_LINES = 30 - -# Set the SHOW_USED_FILES tag to NO to disable the list of files generated -# at the bottom of the documentation of classes and structs. If set to YES the -# list will mention the files that were used to generate the documentation. - -SHOW_USED_FILES = YES - -# If the sources in your project are distributed over multiple directories -# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy -# in the documentation. The default is NO. - -SHOW_DIRECTORIES = YES - -# Set the SHOW_FILES tag to NO to disable the generation of the Files page. -# This will remove the Files entry from the Quick Index and from the -# Folder Tree View (if specified). The default is YES. - -SHOW_FILES = YES - -# Set the SHOW_NAMESPACES tag to NO to disable the generation of the -# Namespaces page. -# This will remove the Namespaces entry from the Quick Index -# and from the Folder Tree View (if specified). The default is YES. - -SHOW_NAMESPACES = YES - -# The FILE_VERSION_FILTER tag can be used to specify a program or script that -# doxygen should invoke to get the current version for each file (typically from -# the version control system). Doxygen will invoke the program by executing (via -# popen()) the command , where is the value of -# the FILE_VERSION_FILTER tag, and is the name of an input file -# provided by doxygen. Whatever the program writes to standard output -# is used as the file version. See the manual for examples. - -FILE_VERSION_FILTER = - -# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by -# doxygen. The layout file controls the global structure of the generated output files -# in an output format independent way. The create the layout file that represents -# doxygen's defaults, run doxygen with the -l option. You can optionally specify a -# file name after the option, if omitted DoxygenLayout.xml will be used as the name -# of the layout file. - -LAYOUT_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to warning and progress messages -#--------------------------------------------------------------------------- - -# The QUIET tag can be used to turn on/off the messages that are generated -# by doxygen. Possible values are YES and NO. If left blank NO is used. - -QUIET = NO - -# The WARNINGS tag can be used to turn on/off the warning messages that are -# generated by doxygen. Possible values are YES and NO. If left blank -# NO is used. - -WARNINGS = YES - -# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings -# for undocumented members. If EXTRACT_ALL is set to YES then this flag will -# automatically be disabled. - -WARN_IF_UNDOCUMENTED = YES - -# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for -# potential errors in the documentation, such as not documenting some -# parameters in a documented function, or documenting parameters that -# don't exist or using markup commands wrongly. - -WARN_IF_DOC_ERROR = YES - -# This WARN_NO_PARAMDOC option can be abled to get warnings for -# functions that are documented, but have no documentation for their parameters -# or return value. If set to NO (the default) doxygen will only warn about -# wrong or incomplete parameter documentation, but not about the absence of -# documentation. - -WARN_NO_PARAMDOC = NO - -# The WARN_FORMAT tag determines the format of the warning messages that -# doxygen can produce. 
The string should contain the $file, $line, and $text -# tags, which will be replaced by the file and line number from which the -# warning originated and the warning text. Optionally the format may contain -# $version, which will be replaced by the version of the file (if it could -# be obtained via FILE_VERSION_FILTER) - -WARN_FORMAT = "$file:$line: $text" - -# The WARN_LOGFILE tag can be used to specify a file to which warning -# and error messages should be written. If left blank the output is written -# to stderr. - -WARN_LOGFILE = %WARNING_LOG_PATH% - -#--------------------------------------------------------------------------- -# configuration options related to the input files -#--------------------------------------------------------------------------- - -# The INPUT tag can be used to specify the files and/or directories that contain -# documented source files. You may enter file names like "myfile.cpp" or -# directories like "/usr/src/myproject". Separate the files or directories -# with spaces. - -INPUT = ../include ../src/lib_json . - -# This tag can be used to specify the character encoding of the source files -# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is -# also the default input encoding. Doxygen uses libiconv (or the iconv built -# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for -# the list of possible encodings. - -INPUT_ENCODING = UTF-8 - -# If the value of the INPUT tag contains directories, you can use the -# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank the following patterns are tested: -# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx -# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 - -FILE_PATTERNS = *.h \ - *.cpp \ - *.inl \ - *.dox - -# The RECURSIVE tag can be used to turn specify whether or not subdirectories -# should be searched for input files as well. Possible values are YES and NO. -# If left blank NO is used. - -RECURSIVE = YES - -# The EXCLUDE tag can be used to specify files and/or directories that should -# excluded from the INPUT source files. This way you can easily exclude a -# subdirectory from a directory tree whose root is specified with the INPUT tag. - -EXCLUDE = - -# The EXCLUDE_SYMLINKS tag can be used select whether or not files or -# directories that are symbolic links (a Unix filesystem feature) are excluded -# from the input. - -EXCLUDE_SYMLINKS = NO - -# If the value of the INPUT tag contains directories, you can use the -# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude -# certain files from those directories. Note that the wildcards are matched -# against the file with absolute path, so to exclude all test directories -# for example use the pattern */test/* - -EXCLUDE_PATTERNS = - -# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names -# (namespaces, classes, functions, etc.) that should be excluded from the -# output. The symbol name can be a fully qualified name, a word, or if the -# wildcard * is used, a substring. Examples: ANamespace, AClass, -# AClass::ANamespace, ANamespace::*Test - -EXCLUDE_SYMBOLS = - -# The EXAMPLE_PATH tag can be used to specify one or more files or -# directories that contain example code fragments that are included (see -# the \include command). 
- -EXAMPLE_PATH = - -# If the value of the EXAMPLE_PATH tag contains directories, you can use the -# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp -# and *.h) to filter out the source-files in the directories. If left -# blank all files are included. - -EXAMPLE_PATTERNS = * - -# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be -# searched for input files to be used with the \include or \dontinclude -# commands irrespective of the value of the RECURSIVE tag. -# Possible values are YES and NO. If left blank NO is used. - -EXAMPLE_RECURSIVE = NO - -# The IMAGE_PATH tag can be used to specify one or more files or -# directories that contain image that are included in the documentation (see -# the \image command). - -IMAGE_PATH = - -# The INPUT_FILTER tag can be used to specify a program that doxygen should -# invoke to filter for each input file. Doxygen will invoke the filter program -# by executing (via popen()) the command , where -# is the value of the INPUT_FILTER tag, and is the name of an -# input file. Doxygen will then use the output that the filter program writes -# to standard output. -# If FILTER_PATTERNS is specified, this tag will be -# ignored. - -INPUT_FILTER = - -# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern -# basis. -# Doxygen will compare the file name with each pattern and apply the -# filter if there is a match. -# The filters are a list of the form: -# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further -# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER -# is applied to all files. - -FILTER_PATTERNS = - -# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using -# INPUT_FILTER) will be used to filter the input files when producing source -# files to browse (i.e. when SOURCE_BROWSER is set to YES). - -FILTER_SOURCE_FILES = NO - -#--------------------------------------------------------------------------- -# configuration options related to source browsing -#--------------------------------------------------------------------------- - -# If the SOURCE_BROWSER tag is set to YES then a list of source files will -# be generated. Documented entities will be cross-referenced with these sources. -# Note: To get rid of all source code in the generated output, make sure also -# VERBATIM_HEADERS is set to NO. - -SOURCE_BROWSER = YES - -# Setting the INLINE_SOURCES tag to YES will include the body -# of functions and classes directly in the documentation. - -INLINE_SOURCES = NO - -# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct -# doxygen to hide any special comment blocks from generated source code -# fragments. Normal C and C++ comments will always remain visible. - -STRIP_CODE_COMMENTS = YES - -# If the REFERENCED_BY_RELATION tag is set to YES -# then for each documented function all documented -# functions referencing it will be listed. - -REFERENCED_BY_RELATION = YES - -# If the REFERENCES_RELATION tag is set to YES -# then for each documented function all documented entities -# called/used by that function will be listed. - -REFERENCES_RELATION = YES - -# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) -# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from -# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will -# link to the source code. -# Otherwise they will link to the documentation. 
- -REFERENCES_LINK_SOURCE = YES - -# If the USE_HTAGS tag is set to YES then the references to source code -# will point to the HTML generated by the htags(1) tool instead of doxygen -# built-in source browser. The htags tool is part of GNU's global source -# tagging system (see http://www.gnu.org/software/global/global.html). You -# will need version 4.8.6 or higher. - -USE_HTAGS = NO - -# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen -# will generate a verbatim copy of the header file for each class for -# which an include is specified. Set to NO to disable this. - -VERBATIM_HEADERS = YES - -#--------------------------------------------------------------------------- -# configuration options related to the alphabetical class index -#--------------------------------------------------------------------------- - -# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index -# of all compounds will be generated. Enable this if the project -# contains a lot of classes, structs, unions or interfaces. - -ALPHABETICAL_INDEX = NO - -# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then -# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns -# in which this list will be split (can be a number in the range [1..20]) - -COLS_IN_ALPHA_INDEX = 5 - -# In case all classes in a project start with a common prefix, all -# classes will be put under the same header in the alphabetical index. -# The IGNORE_PREFIX tag can be used to specify one or more prefixes that -# should be ignored while generating the index headers. - -IGNORE_PREFIX = - -#--------------------------------------------------------------------------- -# configuration options related to the HTML output -#--------------------------------------------------------------------------- - -# If the GENERATE_HTML tag is set to YES (the default) Doxygen will -# generate HTML output. - -GENERATE_HTML = YES - -# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `html' will be used as the default path. - -HTML_OUTPUT = %HTML_OUTPUT% - -# The HTML_FILE_EXTENSION tag can be used to specify the file extension for -# each generated HTML page (for example: .htm,.php,.asp). If it is left blank -# doxygen will generate files with .html extension. - -HTML_FILE_EXTENSION = .html - -# The HTML_HEADER tag can be used to specify a personal HTML header for -# each generated HTML page. If it is left blank doxygen will generate a -# standard header. - -HTML_HEADER = header.html - -# The HTML_FOOTER tag can be used to specify a personal HTML footer for -# each generated HTML page. If it is left blank doxygen will generate a -# standard footer. - -HTML_FOOTER = footer.html - -# The HTML_STYLESHEET tag can be used to specify a user-defined cascading -# style sheet that is used by each HTML page. It can be used to -# fine-tune the look of the HTML output. If the tag is left blank doxygen -# will generate a default style sheet. Note that doxygen will try to copy -# the style sheet file to the HTML output directory, so don't put your own -# stylesheet in the HTML output directory as well, or it will be erased! - -HTML_STYLESHEET = - -# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, -# files or namespaces will be aligned in HTML using tables. If set to -# NO a bullet list will be used. 
- -HTML_ALIGN_MEMBERS = YES - -# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML -# documentation will contain sections that can be hidden and shown after the -# page has loaded. For this to work a browser that supports -# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox -# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). - -HTML_DYNAMIC_SECTIONS = YES - -# If the GENERATE_DOCSET tag is set to YES, additional index files -# will be generated that can be used as input for Apple's Xcode 3 -# integrated development environment, introduced with OSX 10.5 (Leopard). -# To create a documentation set, doxygen will generate a Makefile in the -# HTML output directory. Running make will produce the docset in that -# directory and running "make install" will install the docset in -# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find -# it at startup. -# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. - -GENERATE_DOCSET = NO - -# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the -# feed. A documentation feed provides an umbrella under which multiple -# documentation sets from a single provider (such as a company or product suite) -# can be grouped. - -DOCSET_FEEDNAME = "Doxygen generated docs" - -# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that -# should uniquely identify the documentation set bundle. This should be a -# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen -# will append .docset to the name. - -DOCSET_BUNDLE_ID = org.doxygen.Project - -# If the GENERATE_HTMLHELP tag is set to YES, additional index files -# will be generated that can be used as input for tools like the -# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) -# of the generated HTML documentation. - -GENERATE_HTMLHELP = %HTML_HELP% - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can -# be used to specify the file name of the resulting .chm file. You -# can add a path in front of the file if the result should not be -# written to the html output directory. - -CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm - -# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can -# be used to specify the location (absolute path including file name) of -# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run -# the HTML help compiler on the generated index.hhp. - -HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" - -# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag -# controls if a separate .chi index file is generated (YES) or that -# it should be included in the master .chm file (NO). - -GENERATE_CHI = YES - -# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING -# is used to encode HtmlHelp index (hhk), content (hhc) and project file -# content. - -CHM_INDEX_ENCODING = - -# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag -# controls whether a binary table of contents is generated (YES) or a -# normal table of contents (NO) in the .chm file. - -BINARY_TOC = YES - -# The TOC_EXPAND flag can be set to YES to add extra items for group members -# to the contents of the HTML help documentation and to the tree view. 
- -TOC_EXPAND = YES - -# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER -# are set, an additional index file will be generated that can be used as input for -# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated -# HTML documentation. - -GENERATE_QHP = NO - -# If the QHG_LOCATION tag is specified, the QCH_FILE tag can -# be used to specify the file name of the resulting .qch file. -# The path specified is relative to the HTML output folder. - -QCH_FILE = - -# The QHP_NAMESPACE tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#namespace - -QHP_NAMESPACE = - -# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating -# Qt Help Project output. For more information please see -# http://doc.trolltech.com/qthelpproject.html#virtual-folders - -QHP_VIRTUAL_FOLDER = doc - -# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. -# For more information please see -# http://doc.trolltech.com/qthelpproject.html#custom-filters - -QHP_CUST_FILTER_NAME = - -# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see -# Qt Help Project / Custom Filters. - -QHP_CUST_FILTER_ATTRS = - -# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's -# filter section matches. -# Qt Help Project / Filter Attributes. - -QHP_SECT_FILTER_ATTRS = - -# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can -# be used to specify the location of Qt's qhelpgenerator. -# If non-empty doxygen will try to run qhelpgenerator on the generated -# .qhp file. - -QHG_LOCATION = - -# The DISABLE_INDEX tag can be used to turn on/off the condensed index at -# top of each HTML page. The value NO (the default) enables the index and -# the value YES disables it. - -DISABLE_INDEX = NO - -# This tag can be used to set the number of enum values (range [1..20]) -# that doxygen will group on one line in the generated HTML documentation. - -ENUM_VALUES_PER_LINE = 4 - -# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index -# structure should be generated to display hierarchical information. -# If the tag value is set to FRAME, a side panel will be generated -# containing a tree-like index structure (just like the one that -# is generated for HTML Help). For this to work a browser that supports -# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, -# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are -# probably better off using the HTML help feature. Other possible values -# for this tag are: HIERARCHIES, which will generate the Groups, Directories, -# and Class Hierarchy pages using a tree view instead of an ordered list; -# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which -# disables this behavior completely. For backwards compatibility with previous -# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE -# respectively. - -GENERATE_TREEVIEW = NO - -# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be -# used to set the initial width (in pixels) of the frame in which the tree -# is shown. - -TREEVIEW_WIDTH = 250 - -# Use this tag to change the font size of Latex formulas included -# as images in the HTML documentation. The default is 10. 
Note that -# when you change the font size after a successful doxygen run you need -# to manually remove any form_*.png images from the HTML output directory -# to force them to be regenerated. - -FORMULA_FONTSIZE = 10 - -#--------------------------------------------------------------------------- -# configuration options related to the LaTeX output -#--------------------------------------------------------------------------- - -# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will -# generate Latex output. - -GENERATE_LATEX = NO - -# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `latex' will be used as the default path. - -LATEX_OUTPUT = latex - -# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be -# invoked. If left blank `latex' will be used as the default command name. - -LATEX_CMD_NAME = latex - -# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to -# generate index for LaTeX. If left blank `makeindex' will be used as the -# default command name. - -MAKEINDEX_CMD_NAME = makeindex - -# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact -# LaTeX documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_LATEX = NO - -# The PAPER_TYPE tag can be used to set the paper type that is used -# by the printer. Possible values are: a4, a4wide, letter, legal and -# executive. If left blank a4wide will be used. - -PAPER_TYPE = a4wide - -# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX -# packages that should be included in the LaTeX output. - -EXTRA_PACKAGES = - -# The LATEX_HEADER tag can be used to specify a personal LaTeX header for -# the generated latex document. The header should contain everything until -# the first chapter. If it is left blank doxygen will generate a -# standard header. Notice: only use this tag if you know what you are doing! - -LATEX_HEADER = - -# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated -# is prepared for conversion to pdf (using ps2pdf). The pdf file will -# contain links (just like the HTML output) instead of page references -# This makes the output suitable for online browsing using a pdf viewer. - -PDF_HYPERLINKS = NO - -# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of -# plain latex in the generated Makefile. Set this option to YES to get a -# higher quality PDF documentation. - -USE_PDFLATEX = NO - -# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. -# command to the generated LaTeX files. This will instruct LaTeX to keep -# running if errors occur, instead of asking the user for help. -# This option is also used when generating formulas in HTML. - -LATEX_BATCHMODE = NO - -# If LATEX_HIDE_INDICES is set to YES then doxygen will not -# include the index chapters (such as File Index, Compound Index, etc.) -# in the output. - -LATEX_HIDE_INDICES = NO - -# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
- -LATEX_SOURCE_CODE = NO - -#--------------------------------------------------------------------------- -# configuration options related to the RTF output -#--------------------------------------------------------------------------- - -# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output -# The RTF output is optimized for Word 97 and may not look very pretty with -# other RTF readers or editors. - -GENERATE_RTF = NO - -# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `rtf' will be used as the default path. - -RTF_OUTPUT = rtf - -# If the COMPACT_RTF tag is set to YES Doxygen generates more compact -# RTF documents. This may be useful for small projects and may help to -# save some trees in general. - -COMPACT_RTF = NO - -# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated -# will contain hyperlink fields. The RTF file will -# contain links (just like the HTML output) instead of page references. -# This makes the output suitable for online browsing using WORD or other -# programs which support those fields. -# Note: wordpad (write) and others do not support links. - -RTF_HYPERLINKS = NO - -# Load stylesheet definitions from file. Syntax is similar to doxygen's -# config file, i.e. a series of assignments. You only have to provide -# replacements, missing definitions are set to their default value. - -RTF_STYLESHEET_FILE = - -# Set optional variables used in the generation of an rtf document. -# Syntax is similar to doxygen's config file. - -RTF_EXTENSIONS_FILE = - -#--------------------------------------------------------------------------- -# configuration options related to the man page output -#--------------------------------------------------------------------------- - -# If the GENERATE_MAN tag is set to YES (the default) Doxygen will -# generate man pages - -GENERATE_MAN = NO - -# The MAN_OUTPUT tag is used to specify where the man pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `man' will be used as the default path. - -MAN_OUTPUT = man - -# The MAN_EXTENSION tag determines the extension that is added to -# the generated man pages (default is the subroutine's section .3) - -MAN_EXTENSION = .3 - -# If the MAN_LINKS tag is set to YES and Doxygen generates man output, -# then it will generate one additional man file for each entity -# documented in the real man page(s). These additional files -# only source the real man page, but without them the man command -# would be unable to find the correct page. The default is NO. - -MAN_LINKS = NO - -#--------------------------------------------------------------------------- -# configuration options related to the XML output -#--------------------------------------------------------------------------- - -# If the GENERATE_XML tag is set to YES Doxygen will -# generate an XML file that captures the structure of -# the code including all documentation. - -GENERATE_XML = NO - -# The XML_OUTPUT tag is used to specify where the XML pages will be put. -# If a relative path is entered the value of OUTPUT_DIRECTORY will be -# put in front of it. If left blank `xml' will be used as the default path. - -XML_OUTPUT = xml - -# The XML_SCHEMA tag can be used to specify an XML schema, -# which can be used by a validating XML parser to check the -# syntax of the XML files. 
- -XML_SCHEMA = - -# The XML_DTD tag can be used to specify an XML DTD, -# which can be used by a validating XML parser to check the -# syntax of the XML files. - -XML_DTD = - -# If the XML_PROGRAMLISTING tag is set to YES Doxygen will -# dump the program listings (including syntax highlighting -# and cross-referencing information) to the XML output. Note that -# enabling this will significantly increase the size of the XML output. - -XML_PROGRAMLISTING = YES - -#--------------------------------------------------------------------------- -# configuration options for the AutoGen Definitions output -#--------------------------------------------------------------------------- - -# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will -# generate an AutoGen Definitions (see autogen.sf.net) file -# that captures the structure of the code including all -# documentation. Note that this feature is still experimental -# and incomplete at the moment. - -GENERATE_AUTOGEN_DEF = NO - -#--------------------------------------------------------------------------- -# configuration options related to the Perl module output -#--------------------------------------------------------------------------- - -# If the GENERATE_PERLMOD tag is set to YES Doxygen will -# generate a Perl module file that captures the structure of -# the code including all documentation. Note that this -# feature is still experimental and incomplete at the -# moment. - -GENERATE_PERLMOD = NO - -# If the PERLMOD_LATEX tag is set to YES Doxygen will generate -# the necessary Makefile rules, Perl scripts and LaTeX code to be able -# to generate PDF and DVI output from the Perl module output. - -PERLMOD_LATEX = NO - -# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be -# nicely formatted so it can be parsed by a human reader. -# This is useful -# if you want to understand what is going on. -# On the other hand, if this -# tag is set to NO the size of the Perl module output will be much smaller -# and Perl will parse it just the same. - -PERLMOD_PRETTY = YES - -# The names of the make variables in the generated doxyrules.make file -# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. -# This is useful so different doxyrules.make files included by the same -# Makefile don't overwrite each other's variables. - -PERLMOD_MAKEVAR_PREFIX = - -#--------------------------------------------------------------------------- -# Configuration options related to the preprocessor -#--------------------------------------------------------------------------- - -# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will -# evaluate all C-preprocessor directives found in the sources and include -# files. - -ENABLE_PREPROCESSING = YES - -# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro -# names in the source code. If set to NO (the default) only conditional -# compilation will be performed. Macro expansion can be done in a controlled -# way by setting EXPAND_ONLY_PREDEF to YES. - -MACRO_EXPANSION = YES - -# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES -# then the macro expansion is limited to the macros specified with the -# PREDEFINED and EXPAND_AS_DEFINED tags. - -EXPAND_ONLY_PREDEF = NO - -# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files -# in the INCLUDE_PATH (see below) will be search if a #include is found. 
- -SEARCH_INCLUDES = YES - -# The INCLUDE_PATH tag can be used to specify one or more directories that -# contain include files that are not input files but should be processed by -# the preprocessor. - -INCLUDE_PATH = ../include - -# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard -# patterns (like *.h and *.hpp) to filter out the header-files in the -# directories. If left blank, the patterns specified with FILE_PATTERNS will -# be used. - -INCLUDE_FILE_PATTERNS = *.h - -# The PREDEFINED tag can be used to specify one or more macro names that -# are defined before the preprocessor is started (similar to the -D option of -# gcc). The argument of the tag is a list of macros of the form: name -# or name=definition (no spaces). If the definition and the = are -# omitted =1 is assumed. To prevent a macro definition from being -# undefined via #undef or recursively expanded use the := operator -# instead of the = operator. - -PREDEFINED = "_MSC_VER=1400" \ - _CPPRTTI \ - _WIN32 \ - JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ - JSON_VALUE_USE_INTERNAL_MAP - -# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then -# this tag can be used to specify a list of macro names that should be expanded. -# The macro definition that is found in the sources will be used. -# Use the PREDEFINED tag if you want to use a different macro definition. - -EXPAND_AS_DEFINED = - -# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then -# doxygen's preprocessor will remove all function-like macros that are alone -# on a line, have an all uppercase name, and do not end with a semicolon. Such -# function macros are typically used for boiler-plate code, and will confuse -# the parser if not removed. - -SKIP_FUNCTION_MACROS = YES - -#--------------------------------------------------------------------------- -# Configuration::additions related to external references -#--------------------------------------------------------------------------- - -# The TAGFILES option can be used to specify one or more tagfiles. -# Optionally an initial location of the external documentation -# can be added for each tagfile. The format of a tag file without -# this location is as follows: -# -# TAGFILES = file1 file2 ... -# Adding location for the tag files is done as follows: -# -# TAGFILES = file1=loc1 "file2 = loc2" ... -# where "loc1" and "loc2" can be relative or absolute paths or -# URLs. If a location is present for each tag, the installdox tool -# does not have to be run to correct the links. -# Note that each tag file must have a unique name -# (where the name does NOT include the path) -# If a tag file is not located in the directory in which doxygen -# is run, you must also specify the path to the tagfile here. - -TAGFILES = - -# When a file name is specified after GENERATE_TAGFILE, doxygen will create -# a tag file that is based on the input files it reads. - -GENERATE_TAGFILE = - -# If the ALLEXTERNALS tag is set to YES all external classes will be listed -# in the class index. If set to NO only the inherited external classes -# will be listed. - -ALLEXTERNALS = NO - -# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed -# in the modules index. If set to NO, only the current project's groups will -# be listed. - -EXTERNAL_GROUPS = YES - -# The PERL_PATH should be the absolute path and name of the perl script -# interpreter (i.e. the result of `which perl'). 
- -PERL_PATH = /usr/bin/perl - -#--------------------------------------------------------------------------- -# Configuration options related to the dot tool -#--------------------------------------------------------------------------- - -# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will -# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base -# or super classes. Setting the tag to NO turns the diagrams off. Note that -# this option is superseded by the HAVE_DOT option below. This is only a -# fallback. It is recommended to install and use dot, since it yields more -# powerful graphs. - -CLASS_DIAGRAMS = NO - -# You can define message sequence charts within doxygen comments using the \msc -# command. Doxygen will then run the mscgen tool (see -# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the -# documentation. The MSCGEN_PATH tag allows you to specify the directory where -# the mscgen tool resides. If left empty the tool is assumed to be found in the -# default search path. - -MSCGEN_PATH = - -# If set to YES, the inheritance and collaboration graphs will hide -# inheritance and usage relations if the target is undocumented -# or is not a class. - -HIDE_UNDOC_RELATIONS = NO - -# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is -# available from the path. This tool is part of Graphviz, a graph visualization -# toolkit from AT&T and Lucent Bell Labs. The other options in this section -# have no effect if this option is set to NO (the default) - -HAVE_DOT = %HAVE_DOT% - -# By default doxygen will write a font called FreeSans.ttf to the output -# directory and reference it in all dot files that doxygen generates. This -# font does not include all possible unicode characters however, so when you need -# these (or just want a differently looking font) you can specify the font name -# using DOT_FONTNAME. You need need to make sure dot is able to find the font, -# which can be done by putting it in a standard location or by setting the -# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory -# containing the font. - -DOT_FONTNAME = FreeSans - -# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. -# The default size is 10pt. - -DOT_FONTSIZE = 10 - -# By default doxygen will tell dot to use the output directory to look for the -# FreeSans.ttf font (which doxygen will put there itself). If you specify a -# different font using DOT_FONTNAME you can set the path where dot -# can find it using this tag. - -DOT_FONTPATH = - -# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect inheritance relations. Setting this tag to YES will force the -# the CLASS_DIAGRAMS tag to NO. - -CLASS_GRAPH = YES - -# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for each documented class showing the direct and -# indirect implementation dependencies (inheritance, containment, and -# class references variables) of the class with other documented classes. - -COLLABORATION_GRAPH = YES - -# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen -# will generate a graph for groups, showing the direct groups dependencies - -GROUP_GRAPHS = YES - -# If the UML_LOOK tag is set to YES doxygen will generate inheritance and -# collaboration diagrams in a style similar to the OMG's Unified Modeling -# Language. 
- -UML_LOOK = %UML_LOOK% - -# If set to YES, the inheritance and collaboration graphs will show the -# relations between templates and their instances. - -TEMPLATE_RELATIONS = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT -# tags are set to YES then doxygen will generate a graph for each documented -# file showing the direct and indirect include dependencies of the file with -# other documented files. - -INCLUDE_GRAPH = YES - -# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and -# HAVE_DOT tags are set to YES then doxygen will generate a graph for each -# documented header file showing the documented files that directly or -# indirectly include this file. - -INCLUDED_BY_GRAPH = YES - -# If the CALL_GRAPH and HAVE_DOT options are set to YES then -# doxygen will generate a call dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable call graphs -# for selected functions only using the \callgraph command. - -CALL_GRAPH = NO - -# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then -# doxygen will generate a caller dependency graph for every global function -# or class method. Note that enabling this option will significantly increase -# the time of a run. So in most cases it will be better to enable caller -# graphs for selected functions only using the \callergraph command. - -CALLER_GRAPH = YES - -# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen -# will graphical hierarchy of all classes instead of a textual one. - -GRAPHICAL_HIERARCHY = YES - -# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES -# then doxygen will show the dependencies a directory has on other directories -# in a graphical way. The dependency relations are determined by the #include -# relations between the files in the directories. - -DIRECTORY_GRAPH = YES - -# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images -# generated by dot. Possible values are png, jpg, or gif -# If left blank png will be used. - -DOT_IMAGE_FORMAT = png - -# The tag DOT_PATH can be used to specify the path where the dot tool can be -# found. If left blank, it is assumed the dot tool can be found in the path. - -DOT_PATH = %DOT_PATH% - -# The DOTFILE_DIRS tag can be used to specify one or more directories that -# contain dot files that are included in the documentation (see the -# \dotfile command). - -DOTFILE_DIRS = - -# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of -# nodes that will be shown in the graph. If the number of nodes in a graph -# becomes larger than this value, doxygen will truncate the graph, which is -# visualized by representing a node as a red box. Note that doxygen if the -# number of direct children of the root node in a graph is already larger than -# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note -# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. - -DOT_GRAPH_MAX_NODES = 50 - -# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the -# graphs generated by dot. A depth value of 3 means that only nodes reachable -# from the root by following a path via at most 3 edges will be shown. Nodes -# that lay further from the root node will be omitted. Note that setting this -# option to 1 or 2 may greatly reduce the computation time needed for large -# code bases. 
Also note that the size of a graph can be further restricted by -# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. - -MAX_DOT_GRAPH_DEPTH = 1000 - -# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent -# background. This is disabled by default, because dot on Windows does not -# seem to support this out of the box. Warning: Depending on the platform used, -# enabling this option may lead to badly anti-aliased labels on the edges of -# a graph (i.e. they become hard to read). - -DOT_TRANSPARENT = NO - -# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output -# files in one run (i.e. multiple -o and -T options on the command line). This -# makes dot run faster, but since only newer versions of dot (>1.8.10) -# support this, this feature is disabled by default. - -DOT_MULTI_TARGETS = YES - -# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will -# generate a legend page explaining the meaning of the various boxes and -# arrows in the dot generated graphs. - -GENERATE_LEGEND = YES - -# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will -# remove the intermediate dot files that are used to generate -# the various graphs. - -DOT_CLEANUP = YES - -#--------------------------------------------------------------------------- -# Options related to the search engine -#--------------------------------------------------------------------------- - -# The SEARCHENGINE tag specifies whether or not a search engine should be -# used. If set to NO the values of all tags below this one will be ignored. - -SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc2/doc/footer.html b/tags/jsoncpp/0.6.0-rc2/doc/footer.html deleted file mode 100644 index a61d952..0000000 --- a/tags/jsoncpp/0.6.0-rc2/doc/footer.html +++ /dev/null @@ -1,23 +0,0 @@ -
- - SourceForge Logo - - hosts this site. - - - Send comments to:
- Json-cpp Developers -
- - - diff --git a/tags/jsoncpp/0.6.0-rc2/doc/header.html b/tags/jsoncpp/0.6.0-rc2/doc/header.html deleted file mode 100644 index 1a6ad61..0000000 --- a/tags/jsoncpp/0.6.0-rc2/doc/header.html +++ /dev/null @@ -1,24 +0,0 @@ - - - -JsonCpp - JSON data format manipulation library - - - - - - - - - - - -
- - JsonCpp project page - - - JsonCpp home page -
- -
diff --git a/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox deleted file mode 100644 index 97cc108..0000000 --- a/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox +++ /dev/null @@ -1,126 +0,0 @@ -/** -\mainpage -\section _intro Introduction - -JSON (JavaScript Object Notation) - is a lightweight data-interchange format. -It can represent integer, real number, string, an ordered sequence of value, and -a collection of name/value pairs. - -Here is an example of JSON data: -\verbatim -// Configuration options -{ - // Default encoding for text - "encoding" : "UTF-8", - - // Plug-ins loaded at start-up - "plug-ins" : [ - "python", - "c++", - "ruby" - ], - - // Tab indent size - "indent" : { "length" : 3, "use_space": true } -} -\endverbatim - -\section _features Features -- read and write JSON document -- attach C and C++ style comments to element during parsing -- rewrite JSON document preserving original comments - -Notes: Comments used to be supported in JSON but where removed for -portability (C like comments are not supported in Python). Since -comments are useful in configuration/input file, this feature was -preserved. - -\section _example Code example - -\code -Json::Value root; // will contains the root value after parsing. -Json::Reader reader; -bool parsingSuccessful = reader.parse( config_doc, root ); -if ( !parsingSuccessful ) -{ - // report to the user the failure and their locations in the document. - std::cout << "Failed to parse configuration\n" - << reader.getFormattedErrorMessages(); - return; -} - -// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no -// such member. -std::string encoding = root.get("encoding", "UTF-8" ).asString(); -// Get the value of the member of root named 'encoding', return a 'null' value if -// there is no such member. -const Json::Value plugins = root["plug-ins"]; -for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. - loadPlugIn( plugins[index].asString() ); - -setIndentLength( root["indent"].get("length", 3).asInt() ); -setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); - -// ... -// At application shutdown to make the new configuration document: -// Since Json::Value has implicit constructor for all value types, it is not -// necessary to explicitly construct the Json::Value object: -root["encoding"] = getCurrentEncoding(); -root["indent"]["length"] = getCurrentIndentLength(); -root["indent"]["use_space"] = getCurrentIndentUseSpace(); - -Json::StyledWriter writer; -// Make a new JSON document for the configuration. Preserve original comments. -std::string outputConfig = writer.write( root ); - -// You can also use streams. This will put the contents of any JSON -// stream at a particular sub-value, if you'd like. -std::cin >> root["subtree"]; - -// And you can write to a stream, using the StyledWriter automatically. -std::cout << root; -\endcode - -\section _pbuild Build instructions -The build instructions are located in the file -README.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest README.txt - -\section _pdownload Download -The sources can be downloaded from -SourceForge download page. - -The latest version of the source is available in the project's subversion repository: - -http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ - -To checkout the source, see the following -instructions. - -\section _news What's New? 
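The _example section of the page above parses a configuration document and re-serializes it with Json::StyledWriter. As a complementary sketch (not part of the original page), the same root value can also be written compactly with Json::FastWriter, which is declared alongside StyledWriter in writer.h. This assumes the "root" value and the includes from the example above; FastWriter emits a single line and, unlike StyledWriter, does not preserve comments.

\code
// Hedged sketch: compact serialization of the previously parsed root value.
// Assumes <json/json.h>, <iostream> and <string> are included.
Json::FastWriter fastWriter;
std::string compact = fastWriter.write( root );   // one-line JSON, comments dropped
std::cout << compact;
\endcode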
-The description of latest changes can be found in -NEWS.txt in the top-directory of the project. - -Permanent link to the latest revision of the file in subversion: -latest NEWS.txt - -\section _plinks Project links -- json-cpp home -- json-cpp sourceforge project - -\section _rlinks Related links -- JSON Specification and alternate language implementations. -- YAML A data format designed for human readability. -- UTF-8 and Unicode FAQ. - -\section _license License -See file LICENSE in the top-directory of the project. - -Basically JsonCpp is licensed under MIT license, or public domain if desired -and recognized in your jurisdiction. - -\author Baptiste Lepilleur -*/ diff --git a/tags/jsoncpp/0.6.0-rc2/doc/readme.txt b/tags/jsoncpp/0.6.0-rc2/doc/readme.txt deleted file mode 100644 index 0e42cdf..0000000 --- a/tags/jsoncpp/0.6.0-rc2/doc/readme.txt +++ /dev/null @@ -1 +0,0 @@ -The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox deleted file mode 100644 index e6fc17a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox +++ /dev/null @@ -1,37 +0,0 @@ -/*! \page roadmap JsonCpp roadmap - \section ms_release Makes JsonCpp ready for release - - Build system clean-up: - - Fix build on Windows (shared-library build is broken) - - Add enable/disable flag for static and shared library build - - Enhance help - - Platform portability check: (Notes: was ok on last check) - - linux/gcc, - - solaris/cc, - - windows/msvc678, - - aix/vacpp - - Add JsonCpp version to header as numeric for use in preprocessor test - - Remove buggy experimental hash stuff - \section ms_strict Adds a strict mode to reader/parser - Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). - - Enforce only object or array as root element - - Disable comment support - - Get jsonchecker failing tests to pass in strict mode - \section ms_writer Writter control - Provides more control to determine how specific items are serialized when JSON allow choice: - - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". - - Optionally allow escaping of "/" using "\/". - \section ms_separation Expose json reader/writer API that do not impose using Json::Value. - Some typical use-case involve an application specific structure to/from a JSON document. - - Event base parser to allow unserializing a Json document directly in datastructure instead of - using the intermediate Json::Value. - - Stream based parser to serialized a Json document without using Json::Value as input. - - Performance oriented parser/writer: - - Provides an event based parser. Should allow pulling & skipping events for ease of use. - - Provides a JSON document builder: fast only. - \section ms_perfo Performance tuning - - Provides support for static property name definition avoiding allocation - - Static property dictionnary can be provided to JSON reader - - Performance scenario & benchmarking - \section testing Testing - - Adds more tests for unicode parsing (e.g. including surrogate and error detection). -*/ diff --git a/tags/jsoncpp/0.6.0-rc2/doxybuild.py b/tags/jsoncpp/0.6.0-rc2/doxybuild.py deleted file mode 100644 index 03ad68d..0000000 --- a/tags/jsoncpp/0.6.0-rc2/doxybuild.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Script to generate doxygen documentation. 
-""" - -import re -import os -import os.path -import sys -import shutil -from devtools import tarball - -def find_program(*filenames): - """find a program in folders path_lst, and sets env[var] - @param filenames: a list of possible names of the program to search for - @return: the full path of the filename if found, or '' if filename could not be found -""" - paths = os.environ.get('PATH', '').split(os.pathsep) - suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' - for filename in filenames: - for name in [filename+ext for ext in suffixes.split()]: - for directory in paths: - full_path = os.path.join(directory, name) - if os.path.isfile(full_path): - return full_path - return '' - -def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - print "Can't read source file %s"%sourcefile - raise - for (k,v) in dict.items(): - v = v.replace('\\','\\\\') - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - print "Can't write target file %s"%targetfile - raise - -def run_doxygen(doxygen_path, config_file, working_dir, is_silent): - config_file = os.path.abspath( config_file ) - doxygen_path = doxygen_path - old_cwd = os.getcwd() - try: - os.chdir( working_dir ) - cmd = [doxygen_path, config_file] - print 'Running:', ' '.join( cmd ) - try: - import subprocess - except: - if os.system( ' '.join( cmd ) ) != 0: - print 'Documentation generation failed' - return False - else: - if is_silent: - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - else: - process = subprocess.Popen( cmd ) - stdout, _ = process.communicate() - if process.returncode: - print 'Documentation generation failed:' - print stdout - return False - return True - finally: - os.chdir( old_cwd ) - -def build_doc( options, make_release=False ): - if make_release: - options.make_tarball = True - options.with_dot = True - options.with_html_help = True - options.with_uml_look = True - options.open = False - options.silent = True - - version = open('version','rt').read().strip() - output_dir = 'dist/doxygen' # relative to doc/doxyfile location. - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - top_dir = os.path.abspath( '.' 
) - html_output_dirname = 'jsoncpp-api-html-' + version - tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) - warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) - html_output_path = os.path.join( output_dir, html_output_dirname ) - def yesno( bool ): - return bool and 'YES' or 'NO' - subst_keys = { - '%JSONCPP_VERSION%': version, - '%DOC_TOPDIR%': '', - '%TOPDIR%': top_dir, - '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), - '%HAVE_DOT%': yesno(options.with_dot), - '%DOT_PATH%': os.path.split(options.dot_path)[0], - '%HTML_HELP%': yesno(options.with_html_help), - '%UML_LOOK%': yesno(options.with_uml_look), - '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) - } - - if os.path.isdir( output_dir ): - print 'Deleting directory:', output_dir - shutil.rmtree( output_dir ) - if not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - - do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) - ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) - if not options.silent: - print open(warning_log_path, 'rb').read() - index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) - print 'Generated documentation can be found in:' - print index_path - if options.open: - import webbrowser - webbrowser.open( 'file://' + index_path ) - if options.make_tarball: - print 'Generating doc tarball to', tarball_path - tarball_sources = [ - output_dir, - 'README.txt', - 'LICENSE', - 'NEWS.txt', - 'version' - ] - tarball_basedir = os.path.join( output_dir, html_output_dirname ) - tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) - return tarball_path, html_output_dirname - -def main(): - usage = """%prog - Generates doxygen documentation in build/doxygen. - Optionaly makes a tarball of the documentation to dist/. - - Must be started in the project top directory. - """ - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, - help="""Enable usage of DOT to generate collaboration diagram""") - parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), - help="""Path to Doxygen tool. 
[Default: %default]""") - parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, - help="""Enable generation of Microsoft HTML HELP""") - parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, - help="""Generates DOT graph without UML look [Default: False]""") - parser.add_option('--open', dest="open", action='store_true', default=False, - help="""Open the HTML index in the web browser after generation""") - parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, - help="""Generates a tarball of the documentation in dist/ directory""") - parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, - help="""Hides doxygen output""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - build_doc( options ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h deleted file mode 100644 index 02328d1..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_AUTOLINK_H_INCLUDED -# define JSON_AUTOLINK_H_INCLUDED - -# include "config.h" - -# ifdef JSON_IN_CPPTL -# include -# endif - -# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) -# define CPPTL_AUTOLINK_NAME "json" -# undef CPPTL_AUTOLINK_DLL -# ifdef JSON_DLL -# define CPPTL_AUTOLINK_DLL -# endif -# include "autolink.h" -# endif - -#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/config.h b/tags/jsoncpp/0.6.0-rc2/include/json/config.h deleted file mode 100644 index 7609d45..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/config.h +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_CONFIG_H_INCLUDED -# define JSON_CONFIG_H_INCLUDED - -/// If defined, indicates that json library is embedded in CppTL library. -//# define JSON_IN_CPPTL 1 - -/// If defined, indicates that json may leverage CppTL library -//# define JSON_USE_CPPTL 1 -/// If defined, indicates that cpptl vector based map should be used instead of std::map -/// as Value container. -//# define JSON_USE_CPPTL_SMALLMAP 1 -/// If defined, indicates that Json specific container should be used -/// (hash table & simple deque container with customizable allocator). -/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 -//# define JSON_VALUE_USE_INTERNAL_MAP 1 -/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. -/// The memory pools allocator used optimization (initializing Value and ValueInternalLink -/// as if it was a POD) that may cause some validation tool to report errors. -/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. -//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 - -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. 
-# define JSON_USE_EXCEPTION 1 - -/// If defined, indicates that the source file is amalgated -/// to prevent private header inclusion. -/// Remarks: it is automatically defined in the generated amalgated header. -// #define JSON_IS_AMALGAMATION - - -# ifdef JSON_IN_CPPTL -# include -# ifndef JSON_USE_CPPTL -# define JSON_USE_CPPTL 1 -# endif -# endif - -# ifdef JSON_IN_CPPTL -# define JSON_API CPPTL_API -# elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) -# elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else -# define JSON_API -# endif - -// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer -// Storages, and 64 bits integer support is disabled. -// #define JSON_NO_INT64 1 - -#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 -// Microsoft Visual Studio 6 only support conversion from __int64 to double -// (no conversion from unsigned __int64). -#define JSON_USE_INT64_DOUBLE_CONVERSION 1 -#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 - -#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 -/// Indicates that the following function is deprecated. -# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) -#endif - -#if !defined(JSONCPP_DEPRECATED) -# define JSONCPP_DEPRECATED(message) -#endif // if !defined(JSONCPP_DEPRECATED) - -namespace Json { - typedef int Int; - typedef unsigned int UInt; -# if defined(JSON_NO_INT64) - typedef int LargestInt; - typedef unsigned int LargestUInt; -# undef JSON_HAS_INT64 -# else // if defined(JSON_NO_INT64) - // For Microsoft Visual use specific types as long long is not supported -# if defined(_MSC_VER) // Microsoft Visual Studio - typedef __int64 Int64; - typedef unsigned __int64 UInt64; -# else // if defined(_MSC_VER) // Other platforms, use long long - typedef long long int Int64; - typedef unsigned long long int UInt64; -# endif // if defined(_MSC_VER) - typedef Int64 LargestInt; - typedef UInt64 LargestUInt; -# define JSON_HAS_INT64 -# endif // if defined(JSON_NO_INT64) -} // end namespace Json - - -#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/features.h b/tags/jsoncpp/0.6.0-rc2/include/json/features.h deleted file mode 100644 index 4353278..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/features.h +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_FEATURES_H_INCLUDED -# define CPPTL_JSON_FEATURES_H_INCLUDED - -#if !defined(JSON_IS_AMALGAMATION) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGAMATION) - -namespace Json { - - /** \brief Configuration passed to reader and writer. - * This configuration object can be used to force the Reader or Writer - * to behave in a standard conforming way. - */ - class JSON_API Features - { - public: - /** \brief A configuration that allows all features and assumes all strings are UTF-8. - * - C & C++ comments are allowed - * - Root object can be any JSON value - * - Assumes Value strings are encoded in UTF-8 - */ - static Features all(); - - /** \brief A configuration that is strictly compatible with the JSON specification. - * - Comments are forbidden. - * - Root object must be either an array or an object value. 
- * - Assumes Value strings are encoded in UTF-8 - */ - static Features strictMode(); - - /** \brief Initialize the configuration like JsonConfig::allFeatures; - */ - Features(); - - /// \c true if comments are allowed. Default: \c true. - bool allowComments_; - - /// \c true if root must be either an array or an object value. Default: \c false. - bool strictRoot_; - }; - -} // namespace Json - -#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h deleted file mode 100644 index ab863da..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_FORWARDS_H_INCLUDED -# define JSON_FORWARDS_H_INCLUDED - -#if !defined(JSON_IS_AMALGAMATION) -# include "config.h" -#endif // if !defined(JSON_IS_AMALGAMATION) - -namespace Json { - - // writer.h - class FastWriter; - class StyledWriter; - - // reader.h - class Reader; - - // features.h - class Features; - - // value.h - typedef unsigned int ArrayIndex; - class StaticString; - class Path; - class PathArgument; - class Value; - class ValueIteratorBase; - class ValueIterator; - class ValueConstIterator; -#ifdef JSON_VALUE_USE_INTERNAL_MAP - class ValueMapAllocator; - class ValueInternalLink; - class ValueInternalArray; - class ValueInternalMap; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - -} // namespace Json - - -#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/json.h b/tags/jsoncpp/0.6.0-rc2/include/json/json.h deleted file mode 100644 index da5fc96..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/json.h +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_JSON_H_INCLUDED -# define JSON_JSON_H_INCLUDED - -# include "autolink.h" -# include "value.h" -# include "reader.h" -# include "writer.h" -# include "features.h" - -#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/reader.h b/tags/jsoncpp/0.6.0-rc2/include/json/reader.h deleted file mode 100644 index 0a324df..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/reader.h +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_READER_H_INCLUDED -# define CPPTL_JSON_READER_H_INCLUDED - -#if !defined(JSON_IS_AMALGAMATION) -# include "features.h" -# include "value.h" -#endif // if !defined(JSON_IS_AMALGAMATION) -# include -# include -# include -# include - -namespace Json { - - /** \brief Unserialize a JSON document into a Value. - * - */ - class JSON_API Reader - { - public: - typedef char Char; - typedef const Char *Location; - - /** \brief Constructs a Reader allowing all features - * for parsing. - */ - Reader(); - - /** \brief Constructs a Reader allowing the specified feature set - * for parsing. - */ - Reader( const Features &features ); - - /** \brief Read a Value from a JSON document. 
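The Reader constructor taking a Features argument, together with Features::strictMode() declared above, enables strict, comment-free parsing. A minimal sketch, assuming <json/json.h> and <iostream> are included; the document literal and the "answer" member are illustrative only:

\code
// Hedged sketch: strict parsing via the Features object described above.
// strictMode() forbids comments and requires an array or object root.
Json::Features features = Json::Features::strictMode();
Json::Reader reader( features );
Json::Value root;
if ( !reader.parse( "{ \"answer\" : 42 }", root, false /* collectComments */ ) )
   std::cerr << reader.getFormattedErrorMessages();
\endcode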
- * \param document UTF-8 encoded string containing the document to read. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const std::string &document, - Value &root, - bool collectComments = true ); - - /** \brief Read a Value from a JSON document. - * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. - * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. - \ Must be >= beginDoc. - * \param root [out] Contains the root value of the document if it was - * successfully parsed. - * \param collectComments \c true to collect comment and allow writing them back during - * serialization, \c false to discard comments. - * This parameter is ignored if Features::allowComments_ - * is \c false. - * \return \c true if the document was successfully parsed, \c false if an error occurred. - */ - bool parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments = true ); - - /// \brief Parse from input stream. - /// \see Json::operator>>(std::istream&, Json::Value&). - bool parse( std::istream &is, - Value &root, - bool collectComments = true ); - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. - * \deprecated Use getFormattedErrorMessages() instead (typo fix). - */ - JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") - std::string getFormatedErrorMessages() const; - - /** \brief Returns a user friendly string that list errors in the parsed document. - * \return Formatted error message with the list of errors with their location in - * the parsed document. An empty string is returned if no error occurred - * during parsing. 
- */ - std::string getFormattedErrorMessages() const; - - private: - enum TokenType - { - tokenEndOfStream = 0, - tokenObjectBegin, - tokenObjectEnd, - tokenArrayBegin, - tokenArrayEnd, - tokenString, - tokenNumber, - tokenTrue, - tokenFalse, - tokenNull, - tokenArraySeparator, - tokenMemberSeparator, - tokenComment, - tokenError - }; - - class Token - { - public: - TokenType type_; - Location start_; - Location end_; - }; - - class ErrorInfo - { - public: - Token token_; - std::string message_; - Location extra_; - }; - - typedef std::deque Errors; - - bool expectToken( TokenType type, Token &token, const char *message ); - bool readToken( Token &token ); - void skipSpaces(); - bool match( Location pattern, - int patternLength ); - bool readComment(); - bool readCStyleComment(); - bool readCppStyleComment(); - bool readString(); - void readNumber(); - bool readValue(); - bool readObject( Token &token ); - bool readArray( Token &token ); - bool decodeNumber( Token &token ); - bool decodeString( Token &token ); - bool decodeString( Token &token, std::string &decoded ); - bool decodeDouble( Token &token ); - bool decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ); - bool addError( const std::string &message, - Token &token, - Location extra = 0 ); - bool recoverFromError( TokenType skipUntilToken ); - bool addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ); - void skipUntilSpace(); - Value ¤tValue(); - Char getNextChar(); - void getLocationLineAndColumn( Location location, - int &line, - int &column ) const; - std::string getLocationLineAndColumn( Location location ) const; - void addComment( Location begin, - Location end, - CommentPlacement placement ); - void skipCommentTokens( Token &token ); - - typedef std::stack Nodes; - Nodes nodes_; - Errors errors_; - std::string document_; - Location begin_; - Location end_; - Location current_; - Location lastValueEnd_; - Value *lastValue_; - std::string commentsBefore_; - Features features_; - bool collectComments_; - }; - - /** \brief Read from 'sin' into 'root'. - - Always keep comments from the input JSON. - - This can be used to read a file into a particular sub-object. - For example: - \code - Json::Value root; - cin >> root["dir"]["file"]; - cout << root; - \endcode - Result: - \verbatim - { - "dir": { - "file": { - // The input stream JSON would be nested here. - } - } - } - \endverbatim - \throw std::exception on parse error. - \see Json::operator<<() - */ - std::istream& operator>>( std::istream&, Value& ); - -} // namespace Json - -#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/value.h b/tags/jsoncpp/0.6.0-rc2/include/json/value.h deleted file mode 100644 index 32e3455..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/value.h +++ /dev/null @@ -1,1103 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
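For the pointer-range overload of Reader::parse() documented above, a hedged sketch of parsing an in-memory buffer and reporting failures through getFormattedErrorMessages(); the buffer contents are illustrative, and <cstring> and <iostream> are assumed to be included:

\code
// Hedged sketch: parse( beginDoc, endDoc, root ) on a raw character buffer.
const char *doc = "[ 1, 2, 3 ]";               // illustrative input
Json::Reader reader;
Json::Value root;
if ( !reader.parse( doc, doc + std::strlen( doc ), root ) )
   std::cerr << reader.getFormattedErrorMessages();
\endcode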
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef CPPTL_JSON_H_INCLUDED -# define CPPTL_JSON_H_INCLUDED - -#if !defined(JSON_IS_AMALGAMATION) -# include "forwards.h" -#endif // if !defined(JSON_IS_AMALGAMATION) -# include -# include - -# ifndef JSON_USE_CPPTL_SMALLMAP -# include -# else -# include -# endif -# ifdef JSON_USE_CPPTL -# include -# endif - -/** \brief JSON (JavaScript Object Notation). - */ -namespace Json { - - /** \brief Type of the value held by a Value object. - */ - enum ValueType - { - nullValue = 0, ///< 'null' value - intValue, ///< signed integer value - uintValue, ///< unsigned integer value - realValue, ///< double value - stringValue, ///< UTF-8 string value - booleanValue, ///< bool value - arrayValue, ///< array value (ordered list) - objectValue ///< object value (collection of name/value pairs). - }; - - enum CommentPlacement - { - commentBefore = 0, ///< a comment placed on the line before a value - commentAfterOnSameLine, ///< a comment just after a value on the same line - commentAfter, ///< a comment on the line after a value (only make sense for root value) - numberOfCommentPlacement - }; - -//# ifdef JSON_USE_CPPTL -// typedef CppTL::AnyEnumerator EnumMemberNames; -// typedef CppTL::AnyEnumerator EnumValues; -//# endif - - /** \brief Lightweight wrapper to tag static string. - * - * Value constructor and objectValue member assignement takes advantage of the - * StaticString and avoid the cost of string duplication when storing the - * string or the member name. - * - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - class JSON_API StaticString - { - public: - explicit StaticString( const char *czstring ) - : str_( czstring ) - { - } - - operator const char *() const - { - return str_; - } - - const char *c_str() const - { - return str_; - } - - private: - const char *str_; - }; - - /** \brief Represents a JSON value. - * - * This class is a discriminated union wrapper that can represents a: - * - signed integer [range: Value::minInt - Value::maxInt] - * - unsigned integer (range: 0 - Value::maxUInt) - * - double - * - UTF-8 string - * - boolean - * - 'null' - * - an ordered list of Value - * - collection of name/value pairs (javascript object) - * - * The type of the held value is represented by a #ValueType and - * can be obtained using type(). - * - * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. - * Non const methods will automatically create the a #nullValue element - * if it does not exist. - * The sequence of an #arrayValue will be automatically resize and initialized - * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. - * - * The get() methods can be used to obtanis default value in the case the required element - * does not exist. - * - * It is possible to iterate over the list of a #objectValue values using - * the getMemberNames() method. 
- */ - class JSON_API Value - { - friend class ValueIteratorBase; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - friend class ValueInternalLink; - friend class ValueInternalMap; -# endif - public: - typedef std::vector Members; - typedef ValueIterator iterator; - typedef ValueConstIterator const_iterator; - typedef Json::UInt UInt; - typedef Json::Int Int; -# if defined(JSON_HAS_INT64) - typedef Json::UInt64 UInt64; - typedef Json::Int64 Int64; -#endif // defined(JSON_HAS_INT64) - typedef Json::LargestInt LargestInt; - typedef Json::LargestUInt LargestUInt; - typedef Json::ArrayIndex ArrayIndex; - - static const Value null; - /// Minimum signed integer value that can be stored in a Json::Value. - static const LargestInt minLargestInt; - /// Maximum signed integer value that can be stored in a Json::Value. - static const LargestInt maxLargestInt; - /// Maximum unsigned integer value that can be stored in a Json::Value. - static const LargestUInt maxLargestUInt; - - /// Minimum signed int value that can be stored in a Json::Value. - static const Int minInt; - /// Maximum signed int value that can be stored in a Json::Value. - static const Int maxInt; - /// Maximum unsigned int value that can be stored in a Json::Value. - static const UInt maxUInt; - - /// Minimum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 minInt64; - /// Maximum signed 64 bits int value that can be stored in a Json::Value. - static const Int64 maxInt64; - /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. - static const UInt64 maxUInt64; - - private: -#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION -# ifndef JSON_VALUE_USE_INTERNAL_MAP - class CZString - { - public: - enum DuplicationPolicy - { - noDuplication = 0, - duplicate, - duplicateOnCopy - }; - CZString( ArrayIndex index ); - CZString( const char *cstr, DuplicationPolicy allocate ); - CZString( const CZString &other ); - ~CZString(); - CZString &operator =( const CZString &other ); - bool operator<( const CZString &other ) const; - bool operator==( const CZString &other ) const; - ArrayIndex index() const; - const char *c_str() const; - bool isStaticString() const; - private: - void swap( CZString &other ); - const char *cstr_; - ArrayIndex index_; - }; - - public: -# ifndef JSON_USE_CPPTL_SMALLMAP - typedef std::map ObjectValues; -# else - typedef CppTL::SmallMap ObjectValues; -# endif // ifndef JSON_USE_CPPTL_SMALLMAP -# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP -#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - public: - /** \brief Create a default Value of the given type. - - This is a very useful constructor. - To create an empty array, pass arrayValue. - To create an empty object, pass objectValue. - Another Value can then be set to this one by assignment. - This is useful since clear() and resize() will not alter types. - - Examples: - \code - Json::Value null_value; // null - Json::Value arr_value(Json::arrayValue); // [] - Json::Value obj_value(Json::objectValue); // {} - \endcode - */ - Value( ValueType type = nullValue ); - Value( Int value ); - Value( UInt value ); -#if defined(JSON_HAS_INT64) - Value( Int64 value ); - Value( UInt64 value ); -#endif // if defined(JSON_HAS_INT64) - Value( double value ); - Value( const char *value ); - Value( const char *beginValue, const char *endValue ); - /** \brief Constructs a value from a static string. - - * Like other value string constructor but do not duplicate the string for - * internal storage. 
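The class comment above notes that the members of an objectValue can be enumerated with getMemberNames(). A minimal sketch, assuming Value::Members is the std::vector of member-name strings declared in the typedefs above and that <iostream> is included; the member names are illustrative:

\code
// Hedged sketch: enumerating object members via getMemberNames().
Json::Value obj( Json::objectValue );
obj["name"] = "jsoncpp";
obj["version"] = "0.6.0";
Json::Value::Members members = obj.getMemberNames();
for ( Json::Value::Members::const_iterator it = members.begin(); it != members.end(); ++it )
   std::cout << *it << " : " << obj[ *it ].asString() << '\n';
\endcode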
The given string must remain alive after the call to this - * constructor. - * Example of usage: - * \code - * Json::Value aValue( StaticString("some text") ); - * \endcode - */ - Value( const StaticString &value ); - Value( const std::string &value ); -# ifdef JSON_USE_CPPTL - Value( const CppTL::ConstString &value ); -# endif - Value( bool value ); - Value( const Value &other ); - ~Value(); - - Value &operator=( const Value &other ); - /// Swap values. - /// \note Currently, comments are intentionally not swapped, for - /// both logic and efficiency. - void swap( Value &other ); - - ValueType type() const; - - bool operator <( const Value &other ) const; - bool operator <=( const Value &other ) const; - bool operator >=( const Value &other ) const; - bool operator >( const Value &other ) const; - - bool operator ==( const Value &other ) const; - bool operator !=( const Value &other ) const; - - int compare( const Value &other ) const; - - const char *asCString() const; - std::string asString() const; -# ifdef JSON_USE_CPPTL - CppTL::ConstString asConstString() const; -# endif - Int asInt() const; - UInt asUInt() const; - Int64 asInt64() const; - UInt64 asUInt64() const; - LargestInt asLargestInt() const; - LargestUInt asLargestUInt() const; - float asFloat() const; - double asDouble() const; - bool asBool() const; - - bool isNull() const; - bool isBool() const; - bool isInt() const; - bool isUInt() const; - bool isIntegral() const; - bool isDouble() const; - bool isNumeric() const; - bool isString() const; - bool isArray() const; - bool isObject() const; - - bool isConvertibleTo( ValueType other ) const; - - /// Number of values in array or object - ArrayIndex size() const; - - /// \brief Return true if empty array, empty object, or null; - /// otherwise, false. - bool empty() const; - - /// Return isNull() - bool operator!() const; - - /// Remove all object members and array elements. - /// \pre type() is arrayValue, objectValue, or nullValue - /// \post type() is unchanged - void clear(); - - /// Resize the array to size elements. - /// New elements are initialized to null. - /// May only be called on nullValue or arrayValue. - /// \pre type() is arrayValue or nullValue - /// \post type() is arrayValue - void resize( ArrayIndex size ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( ArrayIndex index ); - - /// Access an array element (zero based index ). - /// If the array contains less than index element, then null value are inserted - /// in the array so that its size is index+1. - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - Value &operator[]( int index ); - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) - const Value &operator[]( ArrayIndex index ) const; - - /// Access an array element (zero based index ) - /// (You may need to say 'value[0u]' to get your compiler to distinguish - /// this from the operator[] which takes a string.) 
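The 'value[0u]' disambiguation mentioned above is easy to get wrong, so here is a small sketch of the array accessors as they are documented (resize(), the implicit growth on out-of-range writes, isValidIndex() and get() with a default); it assumes only the public API declared in this header.

#include <json/json.h>
#include <cassert>

// Array element access as documented above.
void arrayAccessExample()
{
    Json::Value arr( Json::arrayValue );
    arr.resize( 3 );            // three null elements
    arr[0u] = "first";          // 0u selects the ArrayIndex overload rather
                                // than the const char* (object key) overload
    arr[1] = 42;                // the int overload also indexes the array
    arr[5] = true;              // grows the array to size 6, padding with null

    assert( arr.isValidIndex( 2 ) );
    Json::Value missing = arr.get( 10, "fallback" );  // out of range -> default
    assert( missing.asString() == "fallback" );
}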
- const Value &operator[]( int index ) const; - - /// If the array contains at least index+1 elements, returns the element value, - /// otherwise returns defaultValue. - Value get( ArrayIndex index, - const Value &defaultValue ) const; - /// Return true if index < size(). - bool isValidIndex( ArrayIndex index ) const; - /// \brief Append value to array at the end. - /// - /// Equivalent to jsonvalue[jsonvalue.size()] = value; - Value &append( const Value &value ); - - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const char *key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const char *key ) const; - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const std::string &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const std::string &key ) const; - /** \brief Access an object value by name, create a null member if it does not exist. - - * If the object as no entry for that name, then the member name used to store - * the new entry is not duplicated. - * Example of use: - * \code - * Json::Value object; - * static const StaticString code("code"); - * object[code] = 1234; - * \endcode - */ - Value &operator[]( const StaticString &key ); -# ifdef JSON_USE_CPPTL - /// Access an object value by name, create a null member if it does not exist. - Value &operator[]( const CppTL::ConstString &key ); - /// Access an object value by name, returns null if there is no member with that name. - const Value &operator[]( const CppTL::ConstString &key ) const; -# endif - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const char *key, - const Value &defaultValue ) const; - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const std::string &key, - const Value &defaultValue ) const; -# ifdef JSON_USE_CPPTL - /// Return the member named key if it exist, defaultValue otherwise. - Value get( const CppTL::ConstString &key, - const Value &defaultValue ) const; -# endif - /// \brief Remove and return the named member. - /// - /// Do nothing if it did not exist. - /// \return the removed Value, or null. - /// \pre type() is objectValue or nullValue - /// \post type() is unchanged - Value removeMember( const char* key ); - /// Same as removeMember(const char*) - Value removeMember( const std::string &key ); - - /// Return true if the object has a member named key. - bool isMember( const char *key ) const; - /// Return true if the object has a member named key. - bool isMember( const std::string &key ) const; -# ifdef JSON_USE_CPPTL - /// Return true if the object has a member named key. - bool isMember( const CppTL::ConstString &key ) const; -# endif - - /// \brief Return a list of the member names. - /// - /// If null, return an empty list. - /// \pre type() is objectValue or nullValue - /// \post if type() was nullValue, it remains nullValue - Members getMemberNames() const; - -//# ifdef JSON_USE_CPPTL -// EnumMemberNames enumMemberNames() const; -// EnumValues enumValues() const; -//# endif - - /// Comments must be //... or /* ... */ - void setComment( const char *comment, - CommentPlacement placement ); - /// Comments must be //... or /* ... 
*/ - void setComment( const std::string &comment, - CommentPlacement placement ); - bool hasComment( CommentPlacement placement ) const; - /// Include delimiters and embedded newlines. - std::string getComment( CommentPlacement placement ) const; - - std::string toStyledString() const; - - const_iterator begin() const; - const_iterator end() const; - - iterator begin(); - iterator end(); - - private: - Value &resolveReference( const char *key, - bool isStatic ); - -# ifdef JSON_VALUE_USE_INTERNAL_MAP - inline bool isItemAvailable() const - { - return itemIsUsed_ == 0; - } - - inline void setItemUsed( bool isUsed = true ) - { - itemIsUsed_ = isUsed ? 1 : 0; - } - - inline bool isMemberNameStatic() const - { - return memberNameIsStatic_ == 0; - } - - inline void setMemberNameIsStatic( bool isStatic ) - { - memberNameIsStatic_ = isStatic ? 1 : 0; - } -# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP - - private: - struct CommentInfo - { - CommentInfo(); - ~CommentInfo(); - - void setComment( const char *text ); - - char *comment_; - }; - - //struct MemberNamesTransform - //{ - // typedef const char *result_type; - // const char *operator()( const CZString &name ) const - // { - // return name.c_str(); - // } - //}; - - union ValueHolder - { - LargestInt int_; - LargestUInt uint_; - double real_; - bool bool_; - char *string_; -# ifdef JSON_VALUE_USE_INTERNAL_MAP - ValueInternalArray *array_; - ValueInternalMap *map_; -#else - ObjectValues *map_; -# endif - } value_; - ValueType type_ : 8; - int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. -# ifdef JSON_VALUE_USE_INTERNAL_MAP - unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. - int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. -# endif - CommentInfo *comments_; - }; - - - /** \brief Experimental and untested: represents an element of the "path" to access a node. - */ - class PathArgument - { - public: - friend class Path; - - PathArgument(); - PathArgument( ArrayIndex index ); - PathArgument( const char *key ); - PathArgument( const std::string &key ); - - private: - enum Kind - { - kindNone = 0, - kindIndex, - kindKey - }; - std::string key_; - ArrayIndex index_; - Kind kind_; - }; - - /** \brief Experimental and untested: represents a "path" to access a node. - * - * Syntax: - * - "." => root node - * - ".[n]" => elements at index 'n' of root node (an array value) - * - ".name" => member named 'name' of root node (an object value) - * - ".name1.name2.name3" - * - ".[0][1][2].name1[3]" - * - ".%" => member name is provided as parameter - * - ".[%]" => index is provied as parameter - */ - class Path - { - public: - Path( const std::string &path, - const PathArgument &a1 = PathArgument(), - const PathArgument &a2 = PathArgument(), - const PathArgument &a3 = PathArgument(), - const PathArgument &a4 = PathArgument(), - const PathArgument &a5 = PathArgument() ); - - const Value &resolve( const Value &root ) const; - Value resolve( const Value &root, - const Value &defaultValue ) const; - /// Creates the "path" to access the specified node and returns a reference on the node. 
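The Path syntax listed above is easiest to read next to a concrete call, so a short sketch follows. The header itself marks Path as experimental and untested, so treat this purely as an illustration of the documented syntax, with made-up member names ("settings", "depth").

#include <json/json.h>
#include <iostream>

// Resolving nodes with the experimental Path class documented above.
void pathExample( const Json::Value &root )
{
    // ".name1.name2" walks object members starting from the root.
    Json::Path depthPath( ".settings.depth" );
    Json::Value depth = depthPath.resolve( root, 6 );    // 6 if the path is absent

    // ".[%]" takes the array index from a PathArgument.
    Json::Path secondItem( ".[%]", Json::PathArgument( 2u ) );
    std::cout << depth << secondItem.resolve( root, Json::Value::null ) << std::endl;
}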
- Value &make( Value &root ) const; - - private: - typedef std::vector InArgs; - typedef std::vector Args; - - void makePath( const std::string &path, - const InArgs &in ); - void addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ); - void invalidPath( const std::string &path, - int location ); - - Args args_; - }; - - - -#ifdef JSON_VALUE_USE_INTERNAL_MAP - /** \brief Allocator to customize Value internal map. - * Below is an example of a simple implementation (default implementation actually - * use memory pool for speed). - * \code - class DefaultValueMapAllocator : public ValueMapAllocator - { - public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } - }; - * \endcode - */ - class JSON_API ValueMapAllocator - { - public: - virtual ~ValueMapAllocator(); - virtual ValueInternalMap *newMap() = 0; - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; - virtual void destructMap( ValueInternalMap *map ) = 0; - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; - virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; - virtual ValueInternalLink *allocateMapLink() = 0; - virtual void releaseMapLink( ValueInternalLink *link ) = 0; - }; - - /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). - * \internal previous_ & next_ allows for bidirectional traversal. - */ - class JSON_API ValueInternalLink - { - public: - enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. - enum InternalFlags { - flagAvailable = 0, - flagUsed = 1 - }; - - ValueInternalLink(); - - ~ValueInternalLink(); - - Value items_[itemPerLink]; - char *keys_[itemPerLink]; - ValueInternalLink *previous_; - ValueInternalLink *next_; - }; - - - /** \brief A linked page based hash-table implementation used internally by Value. - * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked - * list in each bucket to handle collision. There is an addional twist in that - * each node of the collision linked list is a page containing a fixed amount of - * value. This provides a better compromise between memory usage and speed. - * - * Each bucket is made up of a chained list of ValueInternalLink. The last - * link of a given bucket can be found in the 'previous_' field of the following bucket. - * The last link of the last bucket is stored in tailLink_ as it has no following bucket. - * Only the last link of a bucket may contains 'available' item. The last link always - * contains at least one element unless is it the bucket one very first link. 
- */ - class JSON_API ValueInternalMap - { - friend class ValueIteratorBase; - friend class Value; - public: - typedef unsigned int HashKey; - typedef unsigned int BucketIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState - { - IteratorState() - : map_(0) - , link_(0) - , itemIndex_(0) - , bucketIndex_(0) - { - } - ValueInternalMap *map_; - ValueInternalLink *link_; - BucketIndex itemIndex_; - BucketIndex bucketIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalMap(); - ValueInternalMap( const ValueInternalMap &other ); - ValueInternalMap &operator =( const ValueInternalMap &other ); - ~ValueInternalMap(); - - void swap( ValueInternalMap &other ); - - BucketIndex size() const; - - void clear(); - - bool reserveDelta( BucketIndex growth ); - - bool reserve( BucketIndex newItemCount ); - - const Value *find( const char *key ) const; - - Value *find( const char *key ); - - Value &resolveReference( const char *key, - bool isStatic ); - - void remove( const char *key ); - - void doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ); - - ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); - - Value &setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ); - - Value &unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ); - - HashKey hash( const char *key ) const; - - int compare( const ValueInternalMap &other ) const; - - private: - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void incrementBucket( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static const char *key( const IteratorState &iterator ); - static const char *key( const IteratorState &iterator, bool &isStatic ); - static Value &value( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - - private: - ValueInternalLink *buckets_; - ValueInternalLink *tailLink_; - BucketIndex bucketsSize_; - BucketIndex itemCount_; - }; - - /** \brief A simplified deque implementation used internally by Value. - * \internal - * It is based on a list of fixed "page", each page contains a fixed number of items. - * Instead of using a linked-list, a array of pointer is used for fast item look-up. - * Look-up for an element is as follow: - * - compute page index: pageIndex = itemIndex / itemsPerPage - * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] - * - * Insertion is amortized constant time (only the array containing the index of pointers - * need to be reallocated when items are appended). - */ - class JSON_API ValueInternalArray - { - friend class Value; - friend class ValueIteratorBase; - public: - enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
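// A minimal sketch of the page look-up arithmetic described above for
// ValueInternalArray, assuming itemsPerPage == 8; this is illustrative
// arithmetic only, not library code (the real container is internal and
// only built when JSON_VALUE_USE_INTERNAL_MAP is defined).

#include <cassert>

enum { kItemsPerPage = 8 };  // mirrors ValueInternalArray::itemsPerPage

inline unsigned pageIndexOf( unsigned itemIndex ) { return itemIndex / kItemsPerPage; }
inline unsigned slotOf( unsigned itemIndex )      { return itemIndex % kItemsPerPage; }

// pages_[ pageIndexOf(i) ][ slotOf(i) ] is the element at index i:
// item 19 lives on page 19 / 8 == 2, in slot 19 % 8 == 3.
void pageMathExample()
{
    assert( pageIndexOf( 19 ) == 2 );
    assert( slotOf( 19 ) == 3 );
}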
- typedef Value::ArrayIndex ArrayIndex; - typedef unsigned int PageIndex; - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - struct IteratorState // Must be a POD - { - IteratorState() - : array_(0) - , currentPageIndex_(0) - , currentItemIndex_(0) - { - } - ValueInternalArray *array_; - Value **currentPageIndex_; - unsigned int currentItemIndex_; - }; -# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - - ValueInternalArray(); - ValueInternalArray( const ValueInternalArray &other ); - ValueInternalArray &operator =( const ValueInternalArray &other ); - ~ValueInternalArray(); - void swap( ValueInternalArray &other ); - - void clear(); - void resize( ArrayIndex newSize ); - - Value &resolveReference( ArrayIndex index ); - - Value *find( ArrayIndex index ) const; - - ArrayIndex size() const; - - int compare( const ValueInternalArray &other ) const; - - private: - static bool equals( const IteratorState &x, const IteratorState &other ); - static void increment( IteratorState &iterator ); - static void decrement( IteratorState &iterator ); - static Value &dereference( const IteratorState &iterator ); - static Value &unsafeDereference( const IteratorState &iterator ); - static int distance( const IteratorState &x, const IteratorState &y ); - static ArrayIndex indexOf( const IteratorState &iterator ); - void makeBeginIterator( IteratorState &it ) const; - void makeEndIterator( IteratorState &it ) const; - void makeIterator( IteratorState &it, ArrayIndex index ) const; - - void makeIndexValid( ArrayIndex index ); - - Value **pages_; - ArrayIndex size_; - PageIndex pageCount_; - }; - - /** \brief Experimental: do not use. Allocator to customize Value internal array. - * Below is an example of a simple implementation (actual implementation use - * memory pool). - \code -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destruct( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - \endcode - */ - class JSON_API ValueArrayAllocator - { - public: - virtual ~ValueArrayAllocator(); - virtual ValueInternalArray *newArray() = 0; - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; - virtual void destructArray( ValueInternalArray *array ) = 0; - /** \brief Reallocate array page index. - * Reallocates an array of pointer on each page. 
- * \param indexes [input] pointer on the current index. May be \c NULL. - * [output] pointer on the new index of at least - * \a minNewIndexCount pages. - * \param indexCount [input] current number of pages in the index. - * [output] number of page the reallocated index can handle. - * \b MUST be >= \a minNewIndexCount. - * \param minNewIndexCount Minimum number of page the new index must be able to - * handle. - */ - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) = 0; - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) = 0; - virtual Value *allocateArrayPage() = 0; - virtual void releaseArrayPage( Value *value ) = 0; - }; -#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP - - - /** \brief base class for Value iterators. - * - */ - class ValueIteratorBase - { - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef ValueIteratorBase SelfType; - - ValueIteratorBase(); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); -#else - ValueIteratorBase( const ValueInternalArray::IteratorState &state ); - ValueIteratorBase( const ValueInternalMap::IteratorState &state ); -#endif - - bool operator ==( const SelfType &other ) const - { - return isEqual( other ); - } - - bool operator !=( const SelfType &other ) const - { - return !isEqual( other ); - } - - difference_type operator -( const SelfType &other ) const - { - return computeDistance( other ); - } - - /// Return either the index or the member name of the referenced value as a Value. - Value key() const; - - /// Return the index of the referenced Value. -1 if it is not an arrayValue. - UInt index() const; - - /// Return the member name of the referenced Value. "" if it is not an objectValue. - const char *memberName() const; - - protected: - Value &deref() const; - - void increment(); - - void decrement(); - - difference_type computeDistance( const SelfType &other ) const; - - bool isEqual( const SelfType &other ) const; - - void copy( const SelfType &other ); - - private: -#ifndef JSON_VALUE_USE_INTERNAL_MAP - Value::ObjectValues::iterator current_; - // Indicates that iterator is for a null value. - bool isNull_; -#else - union - { - ValueInternalArray::IteratorState array_; - ValueInternalMap::IteratorState map_; - } iterator_; - bool isArray_; -#endif - }; - - /** \brief const iterator for object and array value. - * - */ - class ValueConstIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef const Value &reference; - typedef const Value *pointer; - typedef ValueConstIterator SelfType; - - ValueConstIterator(); - private: - /*! \internal Use by Value to create an iterator. 
- */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueConstIterator( const ValueInternalArray::IteratorState &state ); - ValueConstIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - SelfType &operator =( const ValueIteratorBase &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - - /** \brief Iterator for object and array value. - */ - class ValueIterator : public ValueIteratorBase - { - friend class Value; - public: - typedef unsigned int size_t; - typedef int difference_type; - typedef Value &reference; - typedef Value *pointer; - typedef ValueIterator SelfType; - - ValueIterator(); - ValueIterator( const ValueConstIterator &other ); - ValueIterator( const ValueIterator &other ); - private: - /*! \internal Use by Value to create an iterator. - */ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); -#else - ValueIterator( const ValueInternalArray::IteratorState &state ); - ValueIterator( const ValueInternalMap::IteratorState &state ); -#endif - public: - - SelfType &operator =( const SelfType &other ); - - SelfType operator++( int ) - { - SelfType temp( *this ); - ++*this; - return temp; - } - - SelfType operator--( int ) - { - SelfType temp( *this ); - --*this; - return temp; - } - - SelfType &operator--() - { - decrement(); - return *this; - } - - SelfType &operator++() - { - increment(); - return *this; - } - - reference operator *() const - { - return deref(); - } - }; - - -} // namespace Json - - -#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/writer.h b/tags/jsoncpp/0.6.0-rc2/include/json/writer.h deleted file mode 100644 index 4789363..0000000 --- a/tags/jsoncpp/0.6.0-rc2/include/json/writer.h +++ /dev/null @@ -1,185 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSON_WRITER_H_INCLUDED -# define JSON_WRITER_H_INCLUDED - -#if !defined(JSON_IS_AMALGAMATION) -# include "value.h" -#endif // if !defined(JSON_IS_AMALGAMATION) -# include -# include -# include - -namespace Json { - - class Value; - - /** \brief Abstract class for writers. - */ - class JSON_API Writer - { - public: - virtual ~Writer(); - - virtual std::string write( const Value &root ) = 0; - }; - - /** \brief Outputs a Value in JSON format without formatting (not human friendly). - * - * The JSON document is written in a single line. It is not intended for 'human' consumption, - * but may be usefull to support feature such as RPC where bandwith is limited. - * \sa Reader, Value - */ - class JSON_API FastWriter : public Writer - { - public: - FastWriter(); - virtual ~FastWriter(){} - - void enableYAMLCompatibility(); - - public: // overridden from Writer - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - - std::string document_; - bool yamlCompatiblityEnabled_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way. 
- * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledWriter: public Writer - { - public: - StyledWriter(); - virtual ~StyledWriter(){} - - public: // overridden from Writer - /** \brief Serialize a Value in JSON format. - * \param root Value to serialize. - * \return String containing the JSON document that represents the root value. - */ - virtual std::string write( const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::string document_; - std::string indentString_; - int rightMargin_; - int indentSize_; - bool addChildValues_; - }; - - /** \brief Writes a Value in JSON format in a human friendly way, - to a stream rather than to a string. - * - * The rules for line break and indent are as follow: - * - Object value: - * - if empty then print {} without indent and line break - * - if not empty the print '{', line break & indent, print one value per line - * and then unindent and line break and print '}'. - * - Array value: - * - if empty then print [] without indent and line break - * - if the array contains no object value, empty array or some other value types, - * and all the values fit on one lines, then print the array on a single line. - * - otherwise, it the values do not fit on one line, or the array contains - * object or non empty array, then print one value per line. - * - * If the Value have comments then they are outputed according to their #CommentPlacement. - * - * \param indentation Each level will be indented by this amount extra. - * \sa Reader, Value, Value::setComment() - */ - class JSON_API StyledStreamWriter - { - public: - StyledStreamWriter( std::string indentation="\t" ); - ~StyledStreamWriter(){} - - public: - /** \brief Serialize a Value in JSON format. - * \param out Stream to write to. (Can be ostringstream, e.g.) - * \param root Value to serialize. - * \note There is no point in deriving from Writer, since write() should not return a value. 
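A sketch of how the three writers declared in this header are typically used together, assuming only the public jsoncpp API; the output file name is an arbitrary example.

#include <json/json.h>
#include <fstream>
#include <iostream>

// The compact and styled writers declared above, applied to the same value.
void writerExample( const Json::Value &root )
{
    Json::FastWriter fast;                          // one line, machine oriented
    std::string compact = fast.write( root );

    Json::StyledWriter styled;                      // human friendly, returns a string
    std::string pretty = styled.write( root );

    Json::StyledStreamWriter streamWriter( "  " );  // two-space indentation
    std::ofstream out( "out.json" );
    streamWriter.write( out, root );                // writes straight to the stream

    std::cout << compact << pretty;
}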
- */ - void write( std::ostream &out, const Value &root ); - - private: - void writeValue( const Value &value ); - void writeArrayValue( const Value &value ); - bool isMultineArray( const Value &value ); - void pushValue( const std::string &value ); - void writeIndent(); - void writeWithIndent( const std::string &value ); - void indent(); - void unindent(); - void writeCommentBeforeValue( const Value &root ); - void writeCommentAfterValueOnSameLine( const Value &root ); - bool hasCommentForValue( const Value &value ); - static std::string normalizeEOL( const std::string &text ); - - typedef std::vector ChildValues; - - ChildValues childValues_; - std::ostream* document_; - std::string indentString_; - int rightMargin_; - std::string indentation_; - bool addChildValues_; - }; - -# if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( Int value ); - std::string JSON_API valueToString( UInt value ); -# endif // if defined(JSON_HAS_INT64) - std::string JSON_API valueToString( LargestInt value ); - std::string JSON_API valueToString( LargestUInt value ); - std::string JSON_API valueToString( double value ); - std::string JSON_API valueToString( bool value ); - std::string JSON_API valueToQuotedString( const char *value ); - - /// \brief Output using the StyledStreamWriter. - /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); - -} // namespace Json - - - -#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln deleted file mode 100644 index 5bfa366..0000000 --- a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln +++ /dev/null @@ -1,46 +0,0 @@ -Microsoft Visual Studio Solution File, Format Version 8.00 -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" - ProjectSection(ProjectDependencies) = postProject - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" - ProjectSection(ProjectDependencies) = postProject - {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} - EndProjectSection -EndProject -Global - GlobalSection(SolutionConfiguration) = preSolution - Debug = Debug - dummy = dummy - Release = Release - EndGlobalSection - GlobalSection(ProjectConfiguration) = postSolution - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 - {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 - {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 - 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 - {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - EndGlobalSection - GlobalSection(ExtensibilityAddIns) = postSolution - EndGlobalSection -EndGlobal diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj deleted file mode 100644 index 99a4dd6..0000000 --- a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj +++ /dev/null @@ -1,119 +0,0 @@ [119 deleted lines of Visual Studio project XML; the markup did not survive this export] diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj deleted file mode 100644 index 2d7bf99..0000000 --- a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj +++ /dev/null @@ -1,214 +0,0 @@ [214 deleted lines of Visual Studio project XML; the markup did not survive this export] diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj deleted file mode 100644 index df36700..0000000 --- a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj +++ /dev/null @@ -1,130 +0,0 @@ [130 deleted lines of Visual Studio project XML; the markup did not survive this export] diff --git a/tags/jsoncpp/0.6.0-rc2/makerelease.py b/tags/jsoncpp/0.6.0-rc2/makerelease.py deleted file mode 100644 index 6b8eec3..0000000 --- a/tags/jsoncpp/0.6.0-rc2/makerelease.py +++ /dev/null @@ -1,380 +0,0 @@ -"""Tag the sandbox for release, make source and doc tarballs.
- -Requires Python 2.6 - -Example of invocation (use to test the script): -python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev - -When testing this script: -python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev - -Example of invocation when doing a release: -python makerelease.py 0.5.0 0.6.0-dev -""" -import os.path -import subprocess -import sys -import doxybuild -import subprocess -import xml.etree.ElementTree as ElementTree -import shutil -import urllib2 -import tempfile -import os -import time -from devtools import antglob, fixeol, tarball -import amalgamate - -SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' -SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' -SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' -SOURCEFORGE_PROJECT = 'jsoncpp' - -def set_version( version ): - with open('version','wb') as f: - f.write( version.strip() ) - -def rmdir_if_exist( dir_path ): - if os.path.isdir( dir_path ): - shutil.rmtree( dir_path ) - -class SVNError(Exception): - pass - -def svn_command( command, *args ): - cmd = ['svn', '--non-interactive', command] + list(args) - print 'Running:', ' '.join( cmd ) - process = subprocess.Popen( cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - error = SVNError( 'SVN command failed:\n' + stdout ) - error.returncode = process.returncode - raise error - return stdout - -def check_no_pending_commit(): - """Checks that there is no pending commit in the sandbox.""" - stdout = svn_command( 'status', '--xml' ) - etree = ElementTree.fromstring( stdout ) - msg = [] - for entry in etree.getiterator( 'entry' ): - path = entry.get('path') - status = entry.find('wc-status').get('item') - if status != 'unversioned' and path != 'version': - msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) - if msg: - msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) - return '\n'.join( msg ) - -def svn_join_url( base_url, suffix ): - if not base_url.endswith('/'): - base_url += '/' - if suffix.startswith('/'): - suffix = suffix[1:] - return base_url + suffix - -def svn_check_if_tag_exist( tag_url ): - """Checks if a tag exist. - Returns: True if the tag exist, False otherwise. - """ - try: - list_stdout = svn_command( 'list', tag_url ) - except SVNError, e: - if e.returncode != 1 or not str(e).find('tag_url'): - raise e - # otherwise ignore error, meaning tag does not exist - return False - return True - -def svn_commit( message ): - """Commit the sandbox, providing the specified comment. - """ - svn_command( 'ci', '-m', message ) - -def svn_tag_sandbox( tag_url, message ): - """Makes a tag based on the sandbox revisions. - """ - svn_command( 'copy', '-m', message, '.', tag_url ) - -def svn_remove_tag( tag_url, message ): - """Removes an existing tag. - """ - svn_command( 'delete', '-m', message, tag_url ) - -def svn_export( tag_url, export_dir ): - """Exports the tag_url revision to export_dir. - Target directory, including its parent is created if it does not exist. - If the directory export_dir exist, it is deleted before export proceed. - """ - rmdir_if_exist( export_dir ) - svn_command( 'export', tag_url, export_dir ) - -def fix_sources_eol( dist_dir ): - """Set file EOL for tarball distribution. - """ - print 'Preparing exported source file EOL for distribution...' 
- prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - win_sources = antglob.glob( dist_dir, - includes = '**/*.sln **/*.vcproj', - prune_dirs = prune_dirs ) - unix_sources = antglob.glob( dist_dir, - includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in - sconscript *.json *.expected AUTHORS LICENSE''', - excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', - prune_dirs = prune_dirs ) - for path in win_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) - for path in unix_sources: - fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) - -def download( url, target_path ): - """Download file represented by url to target_path. - """ - f = urllib2.urlopen( url ) - try: - data = f.read() - finally: - f.close() - fout = open( target_path, 'wb' ) - try: - fout.write( data ) - finally: - fout.close() - -def check_compile( distcheck_top_dir, platform ): - cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] - print 'Running:', ' '.join( cmd ) - log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) - flog = open( log_path, 'wb' ) - try: - process = subprocess.Popen( cmd, - stdout=flog, - stderr=subprocess.STDOUT, - cwd=distcheck_top_dir ) - stdout = process.communicate()[0] - status = (process.returncode == 0) - finally: - flog.close() - return (status, log_path) - -def write_tempfile( content, **kwargs ): - fd, path = tempfile.mkstemp( **kwargs ) - f = os.fdopen( fd, 'wt' ) - try: - f.write( content ) - finally: - f.close() - return path - -class SFTPError(Exception): - pass - -def run_sftp_batch( userhost, sftp, batch, retry=0 ): - path = write_tempfile( batch, suffix='.sftp', text=True ) - # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc - cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] - error = None - for retry_index in xrange(0, max(1,retry)): - heading = retry_index == 0 and 'Running:' or 'Retrying:' - print heading, ' '.join( cmd ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode != 0: - error = SFTPError( 'SFTP batch failed:\n' + stdout ) - else: - break - if error: - raise error - return stdout - -def sourceforge_web_synchro( sourceforge_project, doc_dir, - user=None, sftp='sftp' ): - """Notes: does not synchronize sub-directory of doc-dir. 
- """ - userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) - stdout = run_sftp_batch( userhost, sftp, """ -cd htdocs -dir -exit -""" ) - existing_paths = set() - collect = 0 - for line in stdout.split('\n'): - line = line.strip() - if not collect and line.endswith('> dir'): - collect = True - elif collect and line.endswith('> exit'): - break - elif collect == 1: - collect = 2 - elif collect == 2: - path = line.strip().split()[-1:] - if path and path[0] not in ('.', '..'): - existing_paths.add( path[0] ) - upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) - paths_to_remove = existing_paths - upload_paths - if paths_to_remove: - print 'Removing the following file from web:' - print '\n'.join( paths_to_remove ) - stdout = run_sftp_batch( userhost, sftp, """cd htdocs -rm %s -exit""" % ' '.join(paths_to_remove) ) - print 'Uploading %d files:' % len(upload_paths) - batch_size = 10 - upload_paths = list(upload_paths) - start_time = time.time() - for index in xrange(0,len(upload_paths),batch_size): - paths = upload_paths[index:index+batch_size] - file_per_sec = (time.time() - start_time) / (index+1) - remaining_files = len(upload_paths) - index - remaining_sec = file_per_sec * remaining_files - print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) - run_sftp_batch( userhost, sftp, """cd htdocs -lcd %s -mput %s -exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) - -def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): - userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) - run_sftp_batch( userhost, sftp, """ -mput %s -exit -""" % (' '.join(paths),) ) - - -def main(): - usage = """%prog release_version next_dev_version -Update 'version' file to release_version and commit. -Generates the document tarball. -Tags the sandbox revision with release_version. -Update 'version' file to next_dev_version and commit. - -Performs an svn export of tag release version, and build a source tarball. - -Must be started in the project top directory. - -Warning: --force should only be used when developping/testing the release script. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), - help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") - parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), - help="""Path to Doxygen tool. [Default: %default]""") - parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, - help="""Ignore pending commit. [Default: %default]""") - parser.add_option('--retag', dest="retag_release", action='store_true', default=False, - help="""Overwrite release existing tag if it exist. 
[Default: %default]""") - parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', - help="""Comma separated list of platform passed to scons for build check.""") - parser.add_option('--no-test', dest="no_test", action='store_true', default=False, - help="""Skips build check.""") - parser.add_option('--no-web', dest="no_web", action='store_true', default=False, - help="""Do not update web site.""") - parser.add_option('-u', '--upload-user', dest="user", action='store', - help="""Sourceforge user for SFTP documentation upload.""") - parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), - help="""Path of the SFTP compatible binary used to upload the documentation.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 2: - parser.error( 'release_version missing on command-line.' ) - release_version = args[0] - next_version = args[1] - - if not options.platforms and not options.no_test: - parser.error( 'You must specify either --platform or --no-test option.' ) - - if options.ignore_pending_commit: - msg = '' - else: - msg = check_no_pending_commit() - if not msg: - print 'Setting version to', release_version - set_version( release_version ) - svn_commit( 'Release ' + release_version ) - tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) - if svn_check_if_tag_exist( tag_url ): - if options.retag_release: - svn_remove_tag( tag_url, 'Overwriting previous tag' ) - else: - print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url - sys.exit( 1 ) - svn_tag_sandbox( tag_url, 'Release ' + release_version ) - - print 'Generated doxygen document...' -## doc_dirname = r'jsoncpp-api-html-0.5.0' -## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' - doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) - doc_distcheck_dir = 'dist/doccheck' - tarball.decompress( doc_tarball_path, doc_distcheck_dir ) - doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) - - export_dir = 'dist/export' - svn_export( tag_url, export_dir ) - fix_sources_eol( export_dir ) - - source_dir = 'jsoncpp-src-' + release_version - source_tarball_path = 'dist/%s.tar.gz' % source_dir - print 'Generating source tarball to', source_tarball_path - tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) - - amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir - print 'Generating amalgamation source tarball to', amalgamation_tarball_path - amalgamation_dir = 'dist/amalgamation' - amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) - amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version - tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], - amalgamation_dir, prefix_dir=amalgamation_source_dir ) - - # Decompress source tarball, download and install scons-local - distcheck_dir = 'dist/distcheck' - distcheck_top_dir = distcheck_dir + '/' + source_dir - print 'Decompressing source tarball to', distcheck_dir - rmdir_if_exist( distcheck_dir ) - tarball.decompress( source_tarball_path, distcheck_dir ) - scons_local_path = 'dist/scons-local.tar.gz' - print 'Downloading scons-local to', scons_local_path - download( SCONS_LOCAL_URL, scons_local_path ) - print 'Decompressing scons-local to', distcheck_top_dir - tarball.decompress( scons_local_path, distcheck_top_dir ) - - # Run compilation - 
print 'Compiling decompressed tarball' - all_build_status = True - for platform in options.platforms.split(','): - print 'Testing platform:', platform - build_status, log_path = check_compile( distcheck_top_dir, platform ) - print 'see build log:', log_path - print build_status and '=> ok' or '=> FAILED' - all_build_status = all_build_status and build_status - if not build_status: - print 'Testing failed on at least one platform, aborting...' - svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) - sys.exit(1) - if options.user: - if not options.no_web: - print 'Uploading documentation using user', options.user - sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) - print 'Completed documentation upload' - print 'Uploading source and documentation tarballs for release using user', options.user - sourceforge_release_tarball( SOURCEFORGE_PROJECT, - [source_tarball_path, doc_tarball_path], - user=options.user, sftp=options.sftp ) - print 'Source and doc release tarballs uploaded' - else: - print 'No upload user specified. Web site and download tarbal were not uploaded.' - print 'Tarball can be found at:', doc_tarball_path - - # Set next version number and commit - set_version( next_version ) - svn_commit( 'Released ' + release_version ) - else: - sys.stderr.write( msg + '\n' ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py deleted file mode 100644 index 8ee3cbb..0000000 --- a/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py +++ /dev/null @@ -1,53 +0,0 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment - Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py deleted file mode 100644 index 864ff40..0000000 --- a/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import os.path -from fnmatch import fnmatch -import targz - -##def DoxyfileParse(file_contents): -## """ -## Parse a Doxygen source file and return a dictionary of all the values. -## Values will be strings and lists of strings. 
-## """ -## data = {} -## -## import shlex -## lex = shlex.shlex(instream = file_contents, posix = True) -## lex.wordchars += "*+./-:" -## lex.whitespace = lex.whitespace.replace("\n", "") -## lex.escape = "" -## -## lineno = lex.lineno -## last_backslash_lineno = lineno -## token = lex.get_token() -## key = token # the first token should be a key -## last_token = "" -## key_token = False -## next_key = False -## new_data = True -## -## def append_data(data, key, new_data, token): -## if new_data or len(data[key]) == 0: -## data[key].append(token) -## else: -## data[key][-1] += token -## -## while token: -## if token in ['\n']: -## if last_token not in ['\\']: -## key_token = True -## elif token in ['\\']: -## pass -## elif key_token: -## key = token -## key_token = False -## else: -## if token == "+=": -## if not data.has_key(key): -## data[key] = list() -## elif token == "=": -## data[key] = list() -## else: -## append_data( data, key, new_data, token ) -## new_data = True -## -## last_token = token -## token = lex.get_token() -## -## if last_token == '\\' and token != '\n': -## new_data = False -## append_data( data, key, new_data, '\\' ) -## -## # compress lists of len 1 into single strings -## for (k, v) in data.items(): -## if len(v) == 0: -## data.pop(k) -## -## # items in the following list will be kept as lists and not converted to strings -## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: -## continue -## -## if len(v) == 1: -## data[k] = v[0] -## -## return data -## -##def DoxySourceScan(node, env, path): -## """ -## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add -## any files used to generate docs to the list of source files. -## """ -## default_file_patterns = [ -## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', -## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', -## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', -## '*.py', -## ] -## -## default_exclude_patterns = [ -## '*~', -## ] -## -## sources = [] -## -## data = DoxyfileParse(node.get_contents()) -## -## if data.get("RECURSIVE", "NO") == "YES": -## recursive = True -## else: -## recursive = False -## -## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) -## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) -## -## for node in data.get("INPUT", []): -## if os.path.isfile(node): -## sources.add(node) -## elif os.path.isdir(node): -## if recursive: -## for root, dirs, files in os.walk(node): -## for f in files: -## filename = os.path.join(root, f) -## -## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) -## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) -## -## if pattern_check and not exclude_check: -## sources.append(filename) -## else: -## for pattern in file_patterns: -## sources.extend(glob.glob("/".join([node, pattern]))) -## sources = map( lambda path: env.File(path), sources ) -## return sources -## -## -##def DoxySourceScanCheck(node, env): -## """Check if we should scan this file""" -## return os.path.isfile(node.path) - -def srcDistEmitter(source, target, env): -## """Doxygen Doxyfile emitter""" -## # possible output formats and their default values and output locations -## output_formats = { -## "HTML": ("YES", "html"), -## "LATEX": ("YES", "latex"), -## "RTF": ("NO", "rtf"), -## "MAN": ("YES", "man"), -## "XML": ("NO", "xml"), -## } -## -## data = DoxyfileParse(source[0].get_contents()) -## 
-## targets = [] -## out_dir = data.get("OUTPUT_DIRECTORY", ".") -## -## # add our output locations -## for (k, v) in output_formats.items(): -## if data.get("GENERATE_" + k, v[0]) == "YES": -## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) -## -## # don't clobber targets -## for node in targets: -## env.Precious(node) -## -## # set up cleaning stuff -## for node in targets: -## env.Clean(node, node) -## -## return (targets, source) - return (target,source) - -def generate(env): - """ - Add builders and construction variables for the - SrcDist tool. - """ -## doxyfile_scanner = env.Scanner( -## DoxySourceScan, -## "DoxySourceScan", -## scan_check = DoxySourceScanCheck, -## ) - - if targz.exists(env): - srcdist_builder = targz.makeBuilder( srcDistEmitter ) - - env['BUILDERS']['SrcDist'] = srcdist_builder - -def exists(env): - """ - Make sure srcdist exists. - """ - return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py deleted file mode 100644 index 4d30585..0000000 --- a/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py +++ /dev/null @@ -1,79 +0,0 @@ -import re -from SCons.Script import * # the usual scons stuff you get in a SConscript - -def generate(env): - """ - Add builders and construction variables for the - SubstInFile tool. - - Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT - from the source to the target. - The values of SUBST_DICT first have any construction variables expanded - (its keys are not expanded). - If a value of SUBST_DICT is a python callable function, it is called and - the result is expanded as the value. - If there's more than one source and more than one target, each target gets - substituted from the corresponding source. - """ - def do_subst_in_file(targetfile, sourcefile, dict): - """Replace all instances of the keys of dict with their values. - For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, - then all instances of %VERSION% in the file will be replaced with 1.2345 etc. - """ - try: - f = open(sourcefile, 'rb') - contents = f.read() - f.close() - except: - raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile - for (k,v) in dict.items(): - contents = re.sub(k, v, contents) - try: - f = open(targetfile, 'wb') - f.write(contents) - f.close() - except: - raise SCons.Errors.UserError, "Can't write target file %s"%targetfile - return 0 # success - - def subst_in_file(target, source, env): - if not env.has_key('SUBST_DICT'): - raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." - d = dict(env['SUBST_DICT']) # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()).replace('\\','\\\\') - elif SCons.Util.is_String(v): - d[k] = env.subst(v).replace('\\','\\\\') - else: - raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) - for (t,s) in zip(target, source): - return do_subst_in_file(str(t), str(s), d) - - def subst_in_file_string(target, source, env): - """This is what gets printed on the console.""" - return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) - for (t,s) in zip(target, source)]) - - def subst_emitter(target, source, env): - """Add dependency from substituted SUBST_DICT to target. - Returns original target, source tuple unchanged. 
- """ - d = env['SUBST_DICT'].copy() # copy it - for (k,v) in d.items(): - if callable(v): - d[k] = env.subst(v()) - elif SCons.Util.is_String(v): - d[k]=env.subst(v) - Depends(target, SCons.Node.Python.Value(d)) - return target, source - -## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? - subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) - env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) - -def exists(env): - """ - Make sure tool exists. - """ - return True diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py deleted file mode 100644 index f543200..0000000 --- a/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py +++ /dev/null @@ -1,82 +0,0 @@ -"""tarball - -Tool-specific initialization for tarball. - -""" - -## Commands to tackle a command based implementation: -##to unpack on the fly... -##gunzip < FILE.tar.gz | tar xvf - -##to pack on the fly... -##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz - -import os.path - -import SCons.Builder -import SCons.Node.FS -import SCons.Util - -try: - import gzip - import tarfile - internal_targz = 1 -except ImportError: - internal_targz = 0 - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -if internal_targz: - def targz(target, source, env): - def archive_name( path ): - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - return archive_name - - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - tar.add(path, archive_name(path) ) - compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) - base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) - target_path = str(target[0]) - fileobj = gzip.GzipFile( target_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) - for source in source: - source_path = str(source) - if source.isdir(): - os.path.walk(source_path, visit, tar) - else: - tar.add(source_path, archive_name(source_path) ) # filename, arcname - tar.close() - - targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) - - def makeBuilder( emitter = None ): - return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), - source_factory = SCons.Node.FS.Entry, - source_scanner = SCons.Defaults.DirScanner, - suffix = '$TARGZ_SUFFIX', - multi = 1) - TarGzBuilder = makeBuilder() - - def generate(env): - """Add Builders and construction variables for zip to an Environment. - The following environnement variables may be set: - TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). - TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative - to something other than top-dir). - """ - env['BUILDERS']['TarGz'] = TarGzBuilder - env['TARGZ_COM'] = targzAction - env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 - env['TARGZ_SUFFIX'] = '.tar.gz' - env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
-else: - def generate(env): - pass - - -def exists(env): - return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp deleted file mode 100644 index dfb6150..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -/* This executable is used for testing parser/writer using real JSON files. - */ - - -#include -#include // sort -#include - -#if defined(_MSC_VER) && _MSC_VER >= 1310 -# pragma warning( disable: 4996 ) // disable fopen deprecation warning -#endif - -static std::string -readInputTestFile( const char *path ) -{ - FILE *file = fopen( path, "rb" ); - if ( !file ) - return std::string(""); - fseek( file, 0, SEEK_END ); - long size = ftell( file ); - fseek( file, 0, SEEK_SET ); - std::string text; - char *buffer = new char[size+1]; - buffer[size] = 0; - if ( fread( buffer, 1, size, file ) == (unsigned long)size ) - text = buffer; - fclose( file ); - delete[] buffer; - return text; -} - - -static void -printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) -{ - switch ( value.type() ) - { - case Json::nullValue: - fprintf( fout, "%s=null\n", path.c_str() ); - break; - case Json::intValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); - break; - case Json::uintValue: - fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); - break; - case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); - break; - case Json::stringValue: - fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); - break; - case Json::booleanValue: - fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); - break; - case Json::arrayValue: - { - fprintf( fout, "%s=[]\n", path.c_str() ); - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - static char buffer[16]; - sprintf( buffer, "[%d]", index ); - printValueTree( fout, value[index], path + buffer ); - } - } - break; - case Json::objectValue: - { - fprintf( fout, "%s={}\n", path.c_str() ); - Json::Value::Members members( value.getMemberNames() ); - std::sort( members.begin(), members.end() ); - std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; - for ( Json::Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - printValueTree( fout, value[name], path + suffix + name ); - } - } - break; - default: - break; - } -} - - -static int -parseAndSaveValueTree( const std::string &input, - const std::string &actual, - const std::string &kind, - Json::Value &root, - const Json::Features &features, - bool parseOnly ) -{ - Json::Reader reader( features ); - bool parsingSuccessful = reader.parse( input, root ); - if ( !parsingSuccessful ) - { - printf( "Failed to parse %s file: \n%s\n", - kind.c_str(), - reader.getFormattedErrorMessages().c_str() ); - return 1; - } - - if ( !parseOnly ) - { - FILE *factual = fopen( actual.c_str(), "wt" ); - if ( !factual ) - { - printf( "Failed to create %s actual file.\n", kind.c_str() ); - return 2; - } - printValueTree( factual, root ); - fclose( factual ); - } - return 0; -} - - -static int -rewriteValueTree( const std::string &rewritePath, - const Json::Value &root, - std::string &rewrite ) -{ - //Json::FastWriter writer; - //writer.enableYAMLCompatibility(); - Json::StyledWriter writer; - rewrite = writer.write( root ); - FILE *fout = fopen( rewritePath.c_str(), "wt" ); - if ( !fout ) - { - printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); - return 2; - } - fprintf( fout, "%s\n", rewrite.c_str() ); - fclose( fout ); - return 0; -} - - -static std::string -removeSuffix( const std::string &path, - const std::string &extension ) -{ - if ( extension.length() >= path.length() ) - return std::string(""); - std::string suffix = path.substr( path.length() - extension.length() ); - if ( suffix != extension ) - return std::string(""); - return path.substr( 0, path.length() - extension.length() ); -} - - -static void -printConfig() -{ - // Print the configuration used to compile JsonCpp -#if defined(JSON_NO_INT64) - printf( "JSON_NO_INT64=1\n" ); -#else - printf( "JSON_NO_INT64=0\n" ); -#endif -} - - -static int -printUsage( const char *argv[] ) -{ - printf( "Usage: %s [--strict] input-json-file", argv[0] ); - return 3; -} - - -int -parseCommandLine( int argc, const char *argv[], - Json::Features &features, std::string &path, - bool &parseOnly ) -{ - parseOnly = false; - if ( argc < 2 ) - { - return printUsage( argv ); - } - - int index = 1; - if ( std::string(argv[1]) == "--json-checker" ) - { - features = Json::Features::strictMode(); - parseOnly = true; - ++index; - } - - if ( std::string(argv[1]) == "--json-config" ) - { - printConfig(); - return 3; - } - - if ( index == argc || index + 1 < argc ) - { - return printUsage( argv ); - } - - path = argv[index]; - return 0; -} - - -int main( int argc, const char *argv[] ) -{ - std::string path; - Json::Features features; - bool parseOnly; - int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); - if ( exitCode != 0 ) - { - return exitCode; - } - - try - { - std::string input = readInputTestFile( path.c_str() ); - if ( input.empty() ) - { - printf( "Failed to read input or empty input: %s\n", path.c_str() ); - return 3; - } - - std::string basePath = removeSuffix( argv[1], ".json" ); - if ( !parseOnly && basePath.empty() ) - { - printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); - return 3; - } - - std::string actualPath = basePath + ".actual"; - std::string rewritePath = basePath + ".rewrite"; - std::string rewriteActualPath = basePath + ".actual-rewrite"; - - Json::Value root; - exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); - if ( exitCode == 0 && !parseOnly ) - { - std::string rewrite; - exitCode = rewriteValueTree( rewritePath, root, rewrite ); - if ( exitCode == 0 ) - { - Json::Value rewriteRoot; - exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, - "rewrite", rewriteRoot, features, parseOnly ); - } - } - } - catch ( const std::exception &e ) - { - printf( "Unhandled exception:\n%s\n", e.what() ); - exitCode = 1; - } - - return exitCode; -} - diff --git a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript deleted file mode 100644 index 6e68e31..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript +++ /dev/null @@ -1,9 +0,0 @@ -Import( 'env_testing buildJSONTests' ) - -buildJSONTests( env_testing, Split( """ - main.cpp - """ ), - 'jsontestrunner' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h deleted file mode 100644 index 173e2ed..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED -# define JSONCPP_BATCHALLOCATOR_H_INCLUDED - -# include -# include - -# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION - -namespace Json { - -/* Fast memory allocator. - * - * This memory allocator allocates memory for a batch of object (specified by - * the page size, the number of object in each page). - * - * It does not allow the destruction of a single object. All the allocated objects - * can be destroyed at once. The memory can be either released or reused for future - * allocation. - * - * The in-place new operator must be used to construct the object using the pointer - * returned by allocate. - */ -template -class BatchAllocator -{ -public: - typedef AllocatedType Type; - - BatchAllocator( unsigned int objectsPerPage = 255 ) - : freeHead_( 0 ) - , objectsPerPage_( objectsPerPage ) - { -// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); - assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. - assert( objectsPerPage >= 16 ); - batches_ = allocateBatch( 0 ); // allocated a dummy page - currentBatch_ = batches_; - } - - ~BatchAllocator() - { - for ( BatchInfo *batch = batches_; batch; ) - { - BatchInfo *nextBatch = batch->next_; - free( batch ); - batch = nextBatch; - } - } - - /// allocate space for an array of objectPerAllocation object. - /// @warning it is the responsability of the caller to call objects constructors. - AllocatedType *allocate() - { - if ( freeHead_ ) // returns node from free list. 
- { - AllocatedType *object = freeHead_; - freeHead_ = *(AllocatedType **)object; - return object; - } - if ( currentBatch_->used_ == currentBatch_->end_ ) - { - currentBatch_ = currentBatch_->next_; - while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) - currentBatch_ = currentBatch_->next_; - - if ( !currentBatch_ ) // no free batch found, allocate a new one - { - currentBatch_ = allocateBatch( objectsPerPage_ ); - currentBatch_->next_ = batches_; // insert at the head of the list - batches_ = currentBatch_; - } - } - AllocatedType *allocated = currentBatch_->used_; - currentBatch_->used_ += objectPerAllocation; - return allocated; - } - - /// Release the object. - /// @warning it is the responsability of the caller to actually destruct the object. - void release( AllocatedType *object ) - { - assert( object != 0 ); - *(AllocatedType **)object = freeHead_; - freeHead_ = object; - } - -private: - struct BatchInfo - { - BatchInfo *next_; - AllocatedType *used_; - AllocatedType *end_; - AllocatedType buffer_[objectPerAllocation]; - }; - - // disabled copy constructor and assignement operator. - BatchAllocator( const BatchAllocator & ); - void operator =( const BatchAllocator &); - - static BatchInfo *allocateBatch( unsigned int objectsPerPage ) - { - const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation - + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; - BatchInfo *batch = static_cast( malloc( mallocSize ) ); - batch->next_ = 0; - batch->used_ = batch->buffer_; - batch->end_ = batch->buffer_ + objectsPerPage; - return batch; - } - - BatchInfo *batches_; - BatchInfo *currentBatch_; - /// Head of a single linked list within the allocated space of freeed object - AllocatedType *freeHead_; - unsigned int objectsPerPage_; -}; - - -} // namespace Json - -# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION - -#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED - diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl deleted file mode 100644 index 3a532ad..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl +++ /dev/null @@ -1,456 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. 
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueArrayAllocator::~ValueArrayAllocator() -{ -} - -// ////////////////////////////////////////////////////////////////// -// class DefaultValueArrayAllocator -// ////////////////////////////////////////////////////////////////// -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - return new ValueInternalArray(); - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - return new ValueInternalArray( other ); - } - - virtual void destructArray( ValueInternalArray *array ) - { - delete array; - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - free( value ); - } -}; - -#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueArrayAllocator : public ValueArrayAllocator -{ -public: // overridden from ValueArrayAllocator - virtual ~DefaultValueArrayAllocator() - { - } - - virtual ValueInternalArray *newArray() - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray(); // placement new - return array; - } - - virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) - { - ValueInternalArray *array = arraysAllocator_.allocate(); - new (array) ValueInternalArray( other ); // placement new - return array; - } - - virtual void destructArray( ValueInternalArray *array ) - { - if ( array ) - { - array->~ValueInternalArray(); - arraysAllocator_.release( array ); - } - } - - virtual void reallocateArrayPageIndex( Value **&indexes, - ValueInternalArray::PageIndex &indexCount, - ValueInternalArray::PageIndex minNewIndexCount ) - { - ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; - if ( minNewIndexCount > newIndexCount ) - newIndexCount = minNewIndexCount; - void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); - indexCount = newIndexCount; - indexes = static_cast( newIndexes ); - } - 
virtual void releaseArrayPageIndex( Value **indexes, - ValueInternalArray::PageIndex indexCount ) - { - if ( indexes ) - free( indexes ); - } - - virtual Value *allocateArrayPage() - { - return static_cast( pagesAllocator_.allocate() ); - } - - virtual void releaseArrayPage( Value *value ) - { - if ( value ) - pagesAllocator_.release( value ); - } -private: - BatchAllocator arraysAllocator_; - BatchAllocator pagesAllocator_; -}; -#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR - -static ValueArrayAllocator *&arrayAllocator() -{ - static DefaultValueArrayAllocator defaultAllocator; - static ValueArrayAllocator *arrayAllocator = &defaultAllocator; - return arrayAllocator; -} - -static struct DummyArrayAllocatorInitializer { - DummyArrayAllocatorInitializer() - { - arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). - } -} dummyArrayAllocatorInitializer; - -// ////////////////////////////////////////////////////////////////// -// class ValueInternalArray -// ////////////////////////////////////////////////////////////////// -bool -ValueInternalArray::equals( const IteratorState &x, - const IteratorState &other ) -{ - return x.array_ == other.array_ - && x.currentItemIndex_ == other.currentItemIndex_ - && x.currentPageIndex_ == other.currentPageIndex_; -} - - -void -ValueInternalArray::increment( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - != it.array_->size_, - "ValueInternalArray::increment(): moving iterator beyond end" ); - ++(it.currentItemIndex_); - if ( it.currentItemIndex_ == itemsPerPage ) - { - it.currentItemIndex_ = 0; - ++(it.currentPageIndex_); - } -} - - -void -ValueInternalArray::decrement( IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ - && it.currentItemIndex_ == 0, - "ValueInternalArray::decrement(): moving iterator beyond end" ); - if ( it.currentItemIndex_ == 0 ) - { - it.currentItemIndex_ = itemsPerPage-1; - --(it.currentPageIndex_); - } - else - { - --(it.currentItemIndex_); - } -} - - -Value & -ValueInternalArray::unsafeDereference( const IteratorState &it ) -{ - return (*(it.currentPageIndex_))[it.currentItemIndex_]; -} - - -Value & -ValueInternalArray::dereference( const IteratorState &it ) -{ - JSON_ASSERT_MESSAGE( it.array_ && - (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ - < it.array_->size_, - "ValueInternalArray::dereference(): dereferencing invalid iterator" ); - return unsafeDereference( it ); -} - -void -ValueInternalArray::makeBeginIterator( IteratorState &it ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = 0; - it.currentPageIndex_ = pages_; -} - - -void -ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const -{ - it.array_ = const_cast( this ); - it.currentItemIndex_ = index % itemsPerPage; - it.currentPageIndex_ = pages_ + index / itemsPerPage; -} - - -void -ValueInternalArray::makeEndIterator( IteratorState &it ) const -{ - makeIterator( it, size_ ); -} - - -ValueInternalArray::ValueInternalArray() - : pages_( 0 ) - , size_( 0 ) - , pageCount_( 0 ) -{ -} - - -ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) - : pages_( 0 ) - , pageCount_( 0 ) - , size_( other.size_ ) -{ - PageIndex minNewPages = other.size_ / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, - 
"ValueInternalArray::reserve(): bad reallocation" ); - IteratorState itOther; - other.makeBeginIterator( itOther ); - Value *value; - for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) - { - if ( index % itemsPerPage == 0 ) - { - PageIndex pageIndex = index / itemsPerPage; - value = arrayAllocator()->allocateArrayPage(); - pages_[pageIndex] = value; - } - new (value) Value( dereference( itOther ) ); - } -} - - -ValueInternalArray & -ValueInternalArray::operator =( const ValueInternalArray &other ) -{ - ValueInternalArray temp( other ); - swap( temp ); - return *this; -} - - -ValueInternalArray::~ValueInternalArray() -{ - // destroy all constructed items - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - // release all pages - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - // release pages index - arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); -} - - -void -ValueInternalArray::swap( ValueInternalArray &other ) -{ - Value **tempPages = pages_; - pages_ = other.pages_; - other.pages_ = tempPages; - ArrayIndex tempSize = size_; - size_ = other.size_; - other.size_ = tempSize; - PageIndex tempPageCount = pageCount_; - pageCount_ = other.pageCount_; - other.pageCount_ = tempPageCount; -} - -void -ValueInternalArray::clear() -{ - ValueInternalArray dummy; - swap( dummy ); -} - - -void -ValueInternalArray::resize( ArrayIndex newSize ) -{ - if ( newSize == 0 ) - clear(); - else if ( newSize < size_ ) - { - IteratorState it; - IteratorState itEnd; - makeIterator( it, newSize ); - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - value->~Value(); - } - PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; - PageIndex lastPageIndex = size_ / itemsPerPage; - for ( ; pageIndex < lastPageIndex; ++pageIndex ) - arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); - size_ = newSize; - } - else if ( newSize > size_ ) - resolveReference( newSize ); -} - - -void -ValueInternalArray::makeIndexValid( ArrayIndex index ) -{ - // Need to enlarge page index ? - if ( index >= pageCount_ * itemsPerPage ) - { - PageIndex minNewPages = (index + 1) / itemsPerPage; - arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); - JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); - } - - // Need to allocate new pages ? - ArrayIndex nextPageIndex = - (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage - : size_; - if ( nextPageIndex <= index ) - { - PageIndex pageIndex = nextPageIndex / itemsPerPage; - PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; - for ( ; pageToAllocate-- > 0; ++pageIndex ) - pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); - } - - // Initialize all new entries - IteratorState it; - IteratorState itEnd; - makeIterator( it, size_ ); - size_ = index + 1; - makeIterator( itEnd, size_ ); - for ( ; !equals(it,itEnd); increment(it) ) - { - Value *value = &dereference(it); - new (value) Value(); // Construct a default value using placement new - } -} - -Value & -ValueInternalArray::resolveReference( ArrayIndex index ) -{ - if ( index >= size_ ) - makeIndexValid( index ); - return pages_[index/itemsPerPage][index%itemsPerPage]; -} - -Value * -ValueInternalArray::find( ArrayIndex index ) const -{ - if ( index >= size_ ) - return 0; - return &(pages_[index/itemsPerPage][index%itemsPerPage]); -} - -ValueInternalArray::ArrayIndex -ValueInternalArray::size() const -{ - return size_; -} - -int -ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) -{ - return indexOf(y) - indexOf(x); -} - - -ValueInternalArray::ArrayIndex -ValueInternalArray::indexOf( const IteratorState &iterator ) -{ - if ( !iterator.array_ ) - return ArrayIndex(-1); - return ArrayIndex( - (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage - + iterator.currentItemIndex_ ); -} - - -int -ValueInternalArray::compare( const ValueInternalArray &other ) const -{ - int sizeDiff( size_ - other.size_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - - for ( ArrayIndex index =0; index < size_; ++index ) - { - int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( - other.pages_[index/itemsPerPage][index%itemsPerPage] ); - if ( diff != 0 ) - return diff; - } - return 0; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl deleted file mode 100644 index f2fa160..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueInternalMap -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); - * This optimization is used by the fast allocator. 
- */ -ValueInternalLink::ValueInternalLink() - : previous_( 0 ) - , next_( 0 ) -{ -} - -ValueInternalLink::~ValueInternalLink() -{ - for ( int index =0; index < itemPerLink; ++index ) - { - if ( !items_[index].isItemAvailable() ) - { - if ( !items_[index].isMemberNameStatic() ) - free( keys_[index] ); - } - else - break; - } -} - - - -ValueMapAllocator::~ValueMapAllocator() -{ -} - -#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - return new ValueInternalMap(); - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - return new ValueInternalMap( other ); - } - - virtual void destructMap( ValueInternalMap *map ) - { - delete map; - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - return new ValueInternalLink(); - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - delete link; - } -}; -#else -/// @todo make this thread-safe (lock when accessign batch allocator) -class DefaultValueMapAllocator : public ValueMapAllocator -{ -public: // overridden from ValueMapAllocator - virtual ValueInternalMap *newMap() - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap(); // placement new - return map; - } - - virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) - { - ValueInternalMap *map = mapsAllocator_.allocate(); - new (map) ValueInternalMap( other ); // placement new - return map; - } - - virtual void destructMap( ValueInternalMap *map ) - { - if ( map ) - { - map->~ValueInternalMap(); - mapsAllocator_.release( map ); - } - } - - virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) - { - return new ValueInternalLink[size]; - } - - virtual void releaseMapBuckets( ValueInternalLink *links ) - { - delete [] links; - } - - virtual ValueInternalLink *allocateMapLink() - { - ValueInternalLink *link = linksAllocator_.allocate(); - memset( link, 0, sizeof(ValueInternalLink) ); - return link; - } - - virtual void releaseMapLink( ValueInternalLink *link ) - { - link->~ValueInternalLink(); - linksAllocator_.release( link ); - } -private: - BatchAllocator mapsAllocator_; - BatchAllocator linksAllocator_; -}; -#endif - -static ValueMapAllocator *&mapAllocator() -{ - static DefaultValueMapAllocator defaultAllocator; - static ValueMapAllocator *mapAllocator = &defaultAllocator; - return mapAllocator; -} - -static struct DummyMapAllocatorInitializer { - DummyMapAllocatorInitializer() - { - mapAllocator(); // ensure mapAllocator() statics are initialized before main(). - } -} dummyMapAllocatorInitializer; - - - -// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. - -/* -use linked list hash map. -buckets array is a container. -linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) -value have extra state: valid, available, deleted -*/ - - -ValueInternalMap::ValueInternalMap() - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ -} - - -ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) - : buckets_( 0 ) - , tailLink_( 0 ) - , bucketsSize_( 0 ) - , itemCount_( 0 ) -{ - reserve( other.itemCount_ ); - IteratorState it; - IteratorState itEnd; - other.makeBeginIterator( it ); - other.makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - bool isStatic; - const char *memberName = key( it, isStatic ); - const Value &aValue = value( it ); - resolveReference(memberName, isStatic) = aValue; - } -} - - -ValueInternalMap & -ValueInternalMap::operator =( const ValueInternalMap &other ) -{ - ValueInternalMap dummy( other ); - swap( dummy ); - return *this; -} - - -ValueInternalMap::~ValueInternalMap() -{ - if ( buckets_ ) - { - for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) - { - ValueInternalLink *link = buckets_[bucketIndex].next_; - while ( link ) - { - ValueInternalLink *linkToRelease = link; - link = link->next_; - mapAllocator()->releaseMapLink( linkToRelease ); - } - } - mapAllocator()->releaseMapBuckets( buckets_ ); - } -} - - -void -ValueInternalMap::swap( ValueInternalMap &other ) -{ - ValueInternalLink *tempBuckets = buckets_; - buckets_ = other.buckets_; - other.buckets_ = tempBuckets; - ValueInternalLink *tempTailLink = tailLink_; - tailLink_ = other.tailLink_; - other.tailLink_ = tempTailLink; - BucketIndex tempBucketsSize = bucketsSize_; - bucketsSize_ = other.bucketsSize_; - other.bucketsSize_ = tempBucketsSize; - BucketIndex tempItemCount = itemCount_; - itemCount_ = other.itemCount_; - other.itemCount_ = tempItemCount; -} - - -void -ValueInternalMap::clear() -{ - ValueInternalMap dummy; - swap( dummy ); -} - - -ValueInternalMap::BucketIndex -ValueInternalMap::size() const -{ - return itemCount_; -} - -bool -ValueInternalMap::reserveDelta( BucketIndex growth ) -{ - return reserve( itemCount_ + growth ); -} - -bool -ValueInternalMap::reserve( BucketIndex newItemCount ) -{ - if ( !buckets_ && newItemCount > 0 ) - { - buckets_ = mapAllocator()->allocateMapBuckets( 1 ); - bucketsSize_ = 1; - tailLink_ = &buckets_[0]; - } -// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; - return true; -} - - -const Value * -ValueInternalMap::find( const char *key ) const -{ - if ( !bucketsSize_ ) - return 0; - HashKey hashedKey = hash( key ); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( const ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - current = current->next_ ) - { - for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return 0; - if ( strcmp( key, current->keys_[index] ) == 0 ) - return ¤t->items_[index]; - } - } - return 0; -} - - -Value * -ValueInternalMap::find( const char *key ) -{ - const ValueInternalMap *constThis = this; - return const_cast( constThis->find( key ) ); -} - - -Value & -ValueInternalMap::resolveReference( const char *key, - bool isStatic ) -{ - HashKey hashedKey = hash( key ); - if ( bucketsSize_ ) - { - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink **previous = 0; - BucketIndex index; - for ( ValueInternalLink *current = &buckets_[bucketIndex]; - current != 0; - previous = ¤t->next_, current = current->next_ ) - { - 
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( current->items_[index].isItemAvailable() ) - return setNewItem( key, isStatic, current, index ); - if ( strcmp( key, current->keys_[index] ) == 0 ) - return current->items_[index]; - } - } - } - - reserveDelta( 1 ); - return unsafeAdd( key, isStatic, hashedKey ); -} - - -void -ValueInternalMap::remove( const char *key ) -{ - HashKey hashedKey = hash( key ); - if ( !bucketsSize_ ) - return; - BucketIndex bucketIndex = hashedKey % bucketsSize_; - for ( ValueInternalLink *link = &buckets_[bucketIndex]; - link != 0; - link = link->next_ ) - { - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - return; - if ( strcmp( key, link->keys_[index] ) == 0 ) - { - doActualRemove( link, index, bucketIndex ); - return; - } - } - } -} - -void -ValueInternalMap::doActualRemove( ValueInternalLink *link, - BucketIndex index, - BucketIndex bucketIndex ) -{ - // find last item of the bucket and swap it with the 'removed' one. - // set removed items flags to 'available'. - // if last page only contains 'available' items, then desallocate it (it's empty) - ValueInternalLink *&lastLink = getLastLinkInBucket( index ); - BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 - for ( ; - lastItemIndex < ValueInternalLink::itemPerLink; - ++lastItemIndex ) // may be optimized with dicotomic search - { - if ( lastLink->items_[lastItemIndex].isItemAvailable() ) - break; - } - - BucketIndex lastUsedIndex = lastItemIndex - 1; - Value *valueToDelete = &link->items_[index]; - Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; - if ( valueToDelete != valueToPreserve ) - valueToDelete->swap( *valueToPreserve ); - if ( lastUsedIndex == 0 ) // page is now empty - { // remove it from bucket linked list and delete it. - ValueInternalLink *linkPreviousToLast = lastLink->previous_; - if ( linkPreviousToLast != 0 ) // can not deleted bucket link. - { - mapAllocator()->releaseMapLink( lastLink ); - linkPreviousToLast->next_ = 0; - lastLink = linkPreviousToLast; - } - } - else - { - Value dummy; - valueToPreserve->swap( dummy ); // restore deleted to default Value. - valueToPreserve->setItemUsed( false ); - } - --itemCount_; -} - - -ValueInternalLink *& -ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) -{ - if ( bucketIndex == bucketsSize_ - 1 ) - return tailLink_; - ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; - if ( !previous ) - previous = &buckets_[bucketIndex]; - return previous; -} - - -Value & -ValueInternalMap::setNewItem( const char *key, - bool isStatic, - ValueInternalLink *link, - BucketIndex index ) -{ - char *duplicatedKey = makeMemberName( key ); - ++itemCount_; - link->keys_[index] = duplicatedKey; - link->items_[index].setItemUsed(); - link->items_[index].setMemberNameIsStatic( isStatic ); - return link->items_[index]; // items already default constructed. -} - - -Value & -ValueInternalMap::unsafeAdd( const char *key, - bool isStatic, - HashKey hashedKey ) -{ - JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); - BucketIndex bucketIndex = hashedKey % bucketsSize_; - ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); - ValueInternalLink *link = previousLink; - BucketIndex index; - for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) - { - if ( link->items_[index].isItemAvailable() ) - break; - } - if ( index == ValueInternalLink::itemPerLink ) // need to add a new page - { - ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); - index = 0; - link->next_ = newLink; - previousLink = newLink; - link = newLink; - } - return setNewItem( key, isStatic, link, index ); -} - - -ValueInternalMap::HashKey -ValueInternalMap::hash( const char *key ) const -{ - HashKey hash = 0; - while ( *key ) - hash += *key++ * 37; - return hash; -} - - -int -ValueInternalMap::compare( const ValueInternalMap &other ) const -{ - int sizeDiff( itemCount_ - other.itemCount_ ); - if ( sizeDiff != 0 ) - return sizeDiff; - // Strict order guaranty is required. Compare all keys FIRST, then compare values. - IteratorState it; - IteratorState itEnd; - makeBeginIterator( it ); - makeEndIterator( itEnd ); - for ( ; !equals(it,itEnd); increment(it) ) - { - if ( !other.find( key( it ) ) ) - return 1; - } - - // All keys are equals, let's compare values - makeBeginIterator( it ); - for ( ; !equals(it,itEnd); increment(it) ) - { - const Value *otherValue = other.find( key( it ) ); - int valueDiff = value(it).compare( *otherValue ); - if ( valueDiff != 0 ) - return valueDiff; - } - return 0; -} - - -void -ValueInternalMap::makeBeginIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = 0; - it.itemIndex_ = 0; - it.link_ = buckets_; -} - - -void -ValueInternalMap::makeEndIterator( IteratorState &it ) const -{ - it.map_ = const_cast( this ); - it.bucketIndex_ = bucketsSize_; - it.itemIndex_ = 0; - it.link_ = 0; -} - - -bool -ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) -{ - return x.map_ == other.map_ - && x.bucketIndex_ == other.bucketIndex_ - && x.link_ == other.link_ - && x.itemIndex_ == other.itemIndex_; -} - - -void -ValueInternalMap::incrementBucket( IteratorState &iterator ) -{ - ++iterator.bucketIndex_; - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) - iterator.link_ = 0; - else - iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); - iterator.itemIndex_ = 0; -} - - -void -ValueInternalMap::increment( IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); - ++iterator.itemIndex_; - if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) - { - JSON_ASSERT_MESSAGE( iterator.link_ != 0, - "ValueInternalMap::increment(): attempting to iterate beyond end." ); - iterator.link_ = iterator.link_->next_; - if ( iterator.link_ == 0 ) - incrementBucket( iterator ); - } - else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) - { - incrementBucket( iterator ); - } -} - - -void -ValueInternalMap::decrement( IteratorState &iterator ) -{ - if ( iterator.itemIndex_ == 0 ) - { - JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); - if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) - { - JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); - --(iterator.bucketIndex_); - } - iterator.link_ = iterator.link_->previous_; - iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; - } -} - - -const char * -ValueInternalMap::key( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->keys_[iterator.itemIndex_]; -} - -const char * -ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); - return iterator.link_->keys_[iterator.itemIndex_]; -} - - -Value & -ValueInternalMap::value( const IteratorState &iterator ) -{ - JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); - return iterator.link_->items_[iterator.itemIndex_]; -} - - -int -ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) -{ - int offset = 0; - IteratorState it = x; - while ( !equals( it, y ) ) - increment( it ); - return offset; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp deleted file mode 100644 index 8bb0304..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp +++ /dev/null @@ -1,880 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGAMATION) -# include -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGAMATION) -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -// Implementation of class Features -// //////////////////////////////// - -Features::Features() - : allowComments_( true ) - , strictRoot_( false ) -{ -} - - -Features -Features::all() -{ - return Features(); -} - - -Features -Features::strictMode() -{ - Features features; - features.allowComments_ = false; - features.strictRoot_ = true; - return features; -} - -// Implementation of class Reader -// //////////////////////////////// - - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4; -} - -static inline bool -in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) -{ - return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; -} - - -static bool -containsNewLine( Reader::Location begin, - Reader::Location end ) -{ - for ( ;begin < end; ++begin ) - if ( *begin == '\n' || *begin == '\r' ) - return true; - return false; -} - - -// Class Reader -// ////////////////////////////////////////////////////////////////// - -Reader::Reader() - : features_( Features::all() ) -{ -} - - -Reader::Reader( const Features &features ) - : features_( features ) -{ -} - - -bool -Reader::parse( const std::string &document, - Value &root, - bool collectComments ) -{ - document_ = document; - const char *begin = document_.c_str(); - const char *end = begin + document_.length(); - return parse( begin, end, root, collectComments ); -} - - -bool -Reader::parse( std::istream& sin, - Value &root, - bool collectComments ) -{ - //std::istream_iterator begin(sin); - //std::istream_iterator end; - // Those would allow streamed input from a file, if parse() were a - // template function. - - // Since std::string is reference-counted, this at least does not - // create an extra copy. 
- std::string doc; - std::getline(sin, doc, (char)EOF); - return parse( doc, root, collectComments ); -} - -bool -Reader::parse( const char *beginDoc, const char *endDoc, - Value &root, - bool collectComments ) -{ - if ( !features_.allowComments_ ) - { - collectComments = false; - } - - begin_ = beginDoc; - end_ = endDoc; - collectComments_ = collectComments; - current_ = begin_; - lastValueEnd_ = 0; - lastValue_ = 0; - commentsBefore_ = ""; - errors_.clear(); - while ( !nodes_.empty() ) - nodes_.pop(); - nodes_.push( &root ); - - bool successful = readValue(); - Token token; - skipCommentTokens( token ); - if ( collectComments_ && !commentsBefore_.empty() ) - root.setComment( commentsBefore_, commentAfter ); - if ( features_.strictRoot_ ) - { - if ( !root.isArray() && !root.isObject() ) - { - // Set error location to start of doc, ideally should be first token found in doc - token.type_ = tokenError; - token.start_ = beginDoc; - token.end_ = endDoc; - addError( "A valid JSON document must be either an array or an object value.", - token ); - return false; - } - } - return successful; -} - - -bool -Reader::readValue() -{ - Token token; - skipCommentTokens( token ); - bool successful = true; - - if ( collectComments_ && !commentsBefore_.empty() ) - { - currentValue().setComment( commentsBefore_, commentBefore ); - commentsBefore_ = ""; - } - - - switch ( token.type_ ) - { - case tokenObjectBegin: - successful = readObject( token ); - break; - case tokenArrayBegin: - successful = readArray( token ); - break; - case tokenNumber: - successful = decodeNumber( token ); - break; - case tokenString: - successful = decodeString( token ); - break; - case tokenTrue: - currentValue() = true; - break; - case tokenFalse: - currentValue() = false; - break; - case tokenNull: - currentValue() = Value(); - break; - default: - return addError( "Syntax error: value, object or array expected.", token ); - } - - if ( collectComments_ ) - { - lastValueEnd_ = current_; - lastValue_ = ¤tValue(); - } - - return successful; -} - - -void -Reader::skipCommentTokens( Token &token ) -{ - if ( features_.allowComments_ ) - { - do - { - readToken( token ); - } - while ( token.type_ == tokenComment ); - } - else - { - readToken( token ); - } -} - - -bool -Reader::expectToken( TokenType type, Token &token, const char *message ) -{ - readToken( token ); - if ( token.type_ != type ) - return addError( message, token ); - return true; -} - - -bool -Reader::readToken( Token &token ) -{ - skipSpaces(); - token.start_ = current_; - Char c = getNextChar(); - bool ok = true; - switch ( c ) - { - case '{': - token.type_ = tokenObjectBegin; - break; - case '}': - token.type_ = tokenObjectEnd; - break; - case '[': - token.type_ = tokenArrayBegin; - break; - case ']': - token.type_ = tokenArrayEnd; - break; - case '"': - token.type_ = tokenString; - ok = readString(); - break; - case '/': - token.type_ = tokenComment; - ok = readComment(); - break; - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - case '-': - token.type_ = tokenNumber; - readNumber(); - break; - case 't': - token.type_ = tokenTrue; - ok = match( "rue", 3 ); - break; - case 'f': - token.type_ = tokenFalse; - ok = match( "alse", 4 ); - break; - case 'n': - token.type_ = tokenNull; - ok = match( "ull", 3 ); - break; - case ',': - token.type_ = tokenArraySeparator; - break; - case ':': - token.type_ = tokenMemberSeparator; - break; - case 0: - token.type_ = tokenEndOfStream; - break; - default: - ok = 
false; - break; - } - if ( !ok ) - token.type_ = tokenError; - token.end_ = current_; - return true; -} - - -void -Reader::skipSpaces() -{ - while ( current_ != end_ ) - { - Char c = *current_; - if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) - ++current_; - else - break; - } -} - - -bool -Reader::match( Location pattern, - int patternLength ) -{ - if ( end_ - current_ < patternLength ) - return false; - int index = patternLength; - while ( index-- ) - if ( current_[index] != pattern[index] ) - return false; - current_ += patternLength; - return true; -} - - -bool -Reader::readComment() -{ - Location commentBegin = current_ - 1; - Char c = getNextChar(); - bool successful = false; - if ( c == '*' ) - successful = readCStyleComment(); - else if ( c == '/' ) - successful = readCppStyleComment(); - if ( !successful ) - return false; - - if ( collectComments_ ) - { - CommentPlacement placement = commentBefore; - if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) - { - if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) - placement = commentAfterOnSameLine; - } - - addComment( commentBegin, current_, placement ); - } - return true; -} - - -void -Reader::addComment( Location begin, - Location end, - CommentPlacement placement ) -{ - assert( collectComments_ ); - if ( placement == commentAfterOnSameLine ) - { - assert( lastValue_ != 0 ); - lastValue_->setComment( std::string( begin, end ), placement ); - } - else - { - if ( !commentsBefore_.empty() ) - commentsBefore_ += "\n"; - commentsBefore_ += std::string( begin, end ); - } -} - - -bool -Reader::readCStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '*' && *current_ == '/' ) - break; - } - return getNextChar() == '/'; -} - - -bool -Reader::readCppStyleComment() -{ - while ( current_ != end_ ) - { - Char c = getNextChar(); - if ( c == '\r' || c == '\n' ) - break; - } - return true; -} - - -void -Reader::readNumber() -{ - while ( current_ != end_ ) - { - if ( !(*current_ >= '0' && *current_ <= '9') && - !in( *current_, '.', 'e', 'E', '+', '-' ) ) - break; - ++current_; - } -} - -bool -Reader::readString() -{ - Char c = 0; - while ( current_ != end_ ) - { - c = getNextChar(); - if ( c == '\\' ) - getNextChar(); - else if ( c == '"' ) - break; - } - return c == '"'; -} - - -bool -Reader::readObject( Token &/*tokenStart*/ ) -{ - Token tokenName; - std::string name; - currentValue() = Value( objectValue ); - while ( readToken( tokenName ) ) - { - bool initialTokenOk = true; - while ( tokenName.type_ == tokenComment && initialTokenOk ) - initialTokenOk = readToken( tokenName ); - if ( !initialTokenOk ) - break; - if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object - return true; - if ( tokenName.type_ != tokenString ) - break; - - name = ""; - if ( !decodeString( tokenName, name ) ) - return recoverFromError( tokenObjectEnd ); - - Token colon; - if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) - { - return addErrorAndRecover( "Missing ':' after object member name", - colon, - tokenObjectEnd ); - } - Value &value = currentValue()[ name ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenObjectEnd ); - - Token comma; - if ( !readToken( comma ) - || ( comma.type_ != tokenObjectEnd && - comma.type_ != tokenArraySeparator && - comma.type_ != tokenComment ) ) - { - return addErrorAndRecover( "Missing ',' or '}' in object declaration", - comma, - tokenObjectEnd 
); - } - bool finalizeTokenOk = true; - while ( comma.type_ == tokenComment && - finalizeTokenOk ) - finalizeTokenOk = readToken( comma ); - if ( comma.type_ == tokenObjectEnd ) - return true; - } - return addErrorAndRecover( "Missing '}' or object member name", - tokenName, - tokenObjectEnd ); -} - - -bool -Reader::readArray( Token &/*tokenStart*/ ) -{ - currentValue() = Value( arrayValue ); - skipSpaces(); - if ( *current_ == ']' ) // empty array - { - Token endArray; - readToken( endArray ); - return true; - } - int index = 0; - for (;;) - { - Value &value = currentValue()[ index++ ]; - nodes_.push( &value ); - bool ok = readValue(); - nodes_.pop(); - if ( !ok ) // error already set - return recoverFromError( tokenArrayEnd ); - - Token token; - // Accept Comment after last item in the array. - ok = readToken( token ); - while ( token.type_ == tokenComment && ok ) - { - ok = readToken( token ); - } - bool badTokenType = ( token.type_ != tokenArraySeparator && - token.type_ != tokenArrayEnd ); - if ( !ok || badTokenType ) - { - return addErrorAndRecover( "Missing ',' or ']' in array declaration", - token, - tokenArrayEnd ); - } - if ( token.type_ == tokenArrayEnd ) - break; - } - return true; -} - - -bool -Reader::decodeNumber( Token &token ) -{ - bool isDouble = false; - for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) - { - isDouble = isDouble - || in( *inspect, '.', 'e', 'E', '+' ) - || ( *inspect == '-' && inspect != token.start_ ); - } - if ( isDouble ) - return decodeDouble( token ); - // Attempts to parse the number as an integer. If the number is - // larger than the maximum supported value of an integer then - // we decode the number as a double. - Location current = token.start_; - bool isNegative = *current == '-'; - if ( isNegative ) - ++current; - Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) - : Value::maxLargestUInt; - Value::LargestUInt threshold = maxIntegerValue / 10; - Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); - assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); - Value::LargestUInt value = 0; - while ( current < token.end_ ) - { - Char c = *current++; - if ( c < '0' || c > '9' ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - Value::UInt digit(c - '0'); - if ( value >= threshold ) - { - // If the current digit is not the last one, or if it is - // greater than the last digit of the maximum integer value, - // the parse the number as a double. 
- if ( current != token.end_ || digit > lastDigitThreshold ) - { - return decodeDouble( token ); - } - } - value = value * 10 + digit; - } - if ( isNegative ) - currentValue() = -Value::LargestInt( value ); - else if ( value <= Value::LargestUInt(Value::maxInt) ) - currentValue() = Value::LargestInt( value ); - else - currentValue() = value; - return true; -} - - -bool -Reader::decodeDouble( Token &token ) -{ - double value = 0; - const int bufferSize = 32; - int count; - int length = int(token.end_ - token.start_); - if ( length <= bufferSize ) - { - Char buffer[bufferSize+1]; - memcpy( buffer, token.start_, length ); - buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); - } - else - { - std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); - } - - if ( count != 1 ) - return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); - currentValue() = value; - return true; -} - - -bool -Reader::decodeString( Token &token ) -{ - std::string decoded; - if ( !decodeString( token, decoded ) ) - return false; - currentValue() = decoded; - return true; -} - - -bool -Reader::decodeString( Token &token, std::string &decoded ) -{ - decoded.reserve( token.end_ - token.start_ - 2 ); - Location current = token.start_ + 1; // skip '"' - Location end = token.end_ - 1; // do not include '"' - while ( current != end ) - { - Char c = *current++; - if ( c == '"' ) - break; - else if ( c == '\\' ) - { - if ( current == end ) - return addError( "Empty escape sequence in string", token, current ); - Char escape = *current++; - switch ( escape ) - { - case '"': decoded += '"'; break; - case '/': decoded += '/'; break; - case '\\': decoded += '\\'; break; - case 'b': decoded += '\b'; break; - case 'f': decoded += '\f'; break; - case 'n': decoded += '\n'; break; - case 'r': decoded += '\r'; break; - case 't': decoded += '\t'; break; - case 'u': - { - unsigned int unicode; - if ( !decodeUnicodeCodePoint( token, current, end, unicode ) ) - return false; - decoded += codePointToUTF8(unicode); - } - break; - default: - return addError( "Bad escape sequence in string", token, current ); - } - } - else - { - decoded += c; - } - } - return true; -} - -bool -Reader::decodeUnicodeCodePoint( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - - if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) ) - return false; - if (unicode >= 0xD800 && unicode <= 0xDBFF) - { - // surrogate pairs - if (end - current < 6) - return addError( "additional six characters expected to parse unicode surrogate pair.", token, current ); - unsigned int surrogatePair; - if (*(current++) == '\\' && *(current++)== 'u') - { - if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair )) - { - unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); - } - else - return false; - } - else - return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current ); - } - return true; -} - -bool -Reader::decodeUnicodeEscapeSequence( Token &token, - Location ¤t, - Location end, - unsigned int &unicode ) -{ - if ( end - current < 4 ) - return addError( "Bad unicode escape sequence in string: four digits expected.", token, current ); - unicode = 0; - for ( int index =0; index < 4; ++index ) - { - Char c = *current++; - unicode *= 16; - if ( c >= '0' && c <= '9' ) - unicode += c - '0'; - else if ( c >= 'a' && c <= 'f' ) - unicode += c - 'a' + 10; - else if ( c >= 'A' && 
c <= 'F' ) - unicode += c - 'A' + 10; - else - return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); - } - return true; -} - - -bool -Reader::addError( const std::string &message, - Token &token, - Location extra ) -{ - ErrorInfo info; - info.token_ = token; - info.message_ = message; - info.extra_ = extra; - errors_.push_back( info ); - return false; -} - - -bool -Reader::recoverFromError( TokenType skipUntilToken ) -{ - int errorCount = int(errors_.size()); - Token skip; - for (;;) - { - if ( !readToken(skip) ) - errors_.resize( errorCount ); // discard errors caused by recovery - if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) - break; - } - errors_.resize( errorCount ); - return false; -} - - -bool -Reader::addErrorAndRecover( const std::string &message, - Token &token, - TokenType skipUntilToken ) -{ - addError( message, token ); - return recoverFromError( skipUntilToken ); -} - - -Value & -Reader::currentValue() -{ - return *(nodes_.top()); -} - - -Reader::Char -Reader::getNextChar() -{ - if ( current_ == end_ ) - return 0; - return *current_++; -} - - -void -Reader::getLocationLineAndColumn( Location location, - int &line, - int &column ) const -{ - Location current = begin_; - Location lastLineStart = current; - line = 0; - while ( current < location && current != end_ ) - { - Char c = *current++; - if ( c == '\r' ) - { - if ( *current == '\n' ) - ++current; - lastLineStart = current; - ++line; - } - else if ( c == '\n' ) - { - lastLineStart = current; - ++line; - } - } - // column & line start at 1 - column = int(location - lastLineStart) + 1; - ++line; -} - - -std::string -Reader::getLocationLineAndColumn( Location location ) const -{ - int line, column; - getLocationLineAndColumn( location, line, column ); - char buffer[18+16+16+1]; - sprintf( buffer, "Line %d, Column %d", line, column ); - return buffer; -} - - -// Deprecated. Preserved for backward compatibility -std::string -Reader::getFormatedErrorMessages() const -{ - return getFormattedErrorMessages(); -} - - -std::string -Reader::getFormattedErrorMessages() const -{ - std::string formattedMessage; - for ( Errors::const_iterator itError = errors_.begin(); - itError != errors_.end(); - ++itError ) - { - const ErrorInfo &error = *itError; - formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; - formattedMessage += " " + error.message_ + "\n"; - if ( error.extra_ ) - formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; - } - return formattedMessage; -} - - -std::istream& operator>>( std::istream &sin, Value &root ) -{ - Json::Reader reader; - bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); - return sin; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h deleted file mode 100644 index 658031b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED -# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED - -/* This header provides common string manipulation support, such as UTF-8, - * portable conversion from/to string... 
- *
- * It is an internal header that must not be exposed.
- */
-
-namespace Json {
-
-/// Converts a unicode code-point to UTF-8.
-static inline std::string
-codePointToUTF8(unsigned int cp)
-{
-   std::string result;
-
-   // based on description from http://en.wikipedia.org/wiki/UTF-8
-
-   if (cp <= 0x7f)
-   {
-      result.resize(1);
-      result[0] = static_cast<char>(cp);
-   }
-   else if (cp <= 0x7FF)
-   {
-      result.resize(2);
-      result[1] = static_cast<char>(0x80 | (0x3f & cp));
-      result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
-   }
-   else if (cp <= 0xFFFF)
-   {
-      result.resize(3);
-      result[2] = static_cast<char>(0x80 | (0x3f & cp));
-      result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
-      result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
-   }
-   else if (cp <= 0x10FFFF)
-   {
-      result.resize(4);
-      result[3] = static_cast<char>(0x80 | (0x3f & cp));
-      result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
-      result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
-      result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
-   }
-
-   return result;
-}
-
-
-/// Returns true if ch is a control character (in range [0,32[).
-static inline bool
-isControlCharacter(char ch)
-{
-   return ch > 0 && ch <= 0x1F;
-}
-
-
-enum {
-   /// Constant that specify the size of the buffer that must be passed to uintToString.
-   uintToStringBufferSize = 3*sizeof(LargestUInt)+1
-};
-
-// Defines a char buffer for use with uintToString().
-typedef char UIntToStringBuffer[uintToStringBufferSize];
-
-
-/** Converts an unsigned integer to string.
- * @param value Unsigned interger to convert to string
- * @param current Input/Output string buffer.
- *        Must have at least uintToStringBufferSize chars free.
- */
-static inline void
-uintToString( LargestUInt value,
-              char *&current )
-{
-   *--current = 0;
-   do
-   {
-      *--current = char(value % 10) + '0';
-      value /= 10;
-   }
-   while ( value != 0 );
-}
-
-} // namespace Json {
-
-#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED
diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp
deleted file mode 100644
index ff98f63..0000000
--- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp
+++ /dev/null
@@ -1,1829 +0,0 @@
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGAMATION) -# include -# include -# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -# include "json_batchallocator.h" -# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR -#endif // if !defined(JSON_IS_AMALGAMATION) -#include -#include -#include -#include -#include -#ifdef JSON_USE_CPPTL -# include -#endif -#include // size_t - -#define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) JSON_FAIL_MESSAGE( message ) - -namespace Json { - -const Value Value::null; -const Int Value::minInt = Int( ~(UInt(-1)/2) ); -const Int Value::maxInt = Int( UInt(-1)/2 ); -const UInt Value::maxUInt = UInt(-1); -const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); -const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); -const UInt64 Value::maxUInt64 = UInt64(-1); -const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); -const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); -const LargestUInt Value::maxLargestUInt = LargestUInt(-1); - - -/// Unknown size marker -static const unsigned int unknown = (unsigned)-1; - - -/** Duplicates the specified string value. - * @param value Pointer to the string to duplicate. Must be zero-terminated if - * length is "unknown". - * @param length Length of the value. if equals to unknown, then it will be - * computed using strlen(value). - * @return Pointer on the duplicate instance of string. - */ -static inline char * -duplicateStringValue( const char *value, - unsigned int length = unknown ) -{ - if ( length == unknown ) - length = (unsigned int)strlen(value); - char *newString = static_cast( malloc( length + 1 ) ); - JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); - memcpy( newString, value, length ); - newString[length] = 0; - return newString; -} - - -/** Free the string duplicated by duplicateStringValue(). - */ -static inline void -releaseStringValue( char *value ) -{ - if ( value ) - free( value ); -} - -} // namespace Json - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ValueInternals... 
-// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -#if !defined(JSON_IS_AMALGAMATION) -# ifdef JSON_VALUE_USE_INTERNAL_MAP -# include "json_internalarray.inl" -# include "json_internalmap.inl" -# endif // JSON_VALUE_USE_INTERNAL_MAP - -# include "json_valueiterator.inl" -#endif // if !defined(JSON_IS_AMALGAMATION) - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CommentInfo -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -Value::CommentInfo::CommentInfo() - : comment_( 0 ) -{ -} - -Value::CommentInfo::~CommentInfo() -{ - if ( comment_ ) - releaseStringValue( comment_ ); -} - - -void -Value::CommentInfo::setComment( const char *text ) -{ - if ( comment_ ) - releaseStringValue( comment_ ); - JSON_ASSERT( text != 0 ); - JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); - // It seems that /**/ style comments are acceptable as well. - comment_ = duplicateStringValue( text ); -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::CZString -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -# ifndef JSON_VALUE_USE_INTERNAL_MAP - -// Notes: index_ indicates if the string was allocated when -// a string is stored. - -Value::CZString::CZString( ArrayIndex index ) - : cstr_( 0 ) - , index_( index ) -{ -} - -Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) - : cstr_( allocate == duplicate ? duplicateStringValue(cstr) - : cstr ) - , index_( allocate ) -{ -} - -Value::CZString::CZString( const CZString &other ) -: cstr_( other.index_ != noDuplication && other.cstr_ != 0 - ? duplicateStringValue( other.cstr_ ) - : other.cstr_ ) - , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) - : other.index_ ) -{ -} - -Value::CZString::~CZString() -{ - if ( cstr_ && index_ == duplicate ) - releaseStringValue( const_cast( cstr_ ) ); -} - -void -Value::CZString::swap( CZString &other ) -{ - std::swap( cstr_, other.cstr_ ); - std::swap( index_, other.index_ ); -} - -Value::CZString & -Value::CZString::operator =( const CZString &other ) -{ - CZString temp( other ); - swap( temp ); - return *this; -} - -bool -Value::CZString::operator<( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) < 0; - return index_ < other.index_; -} - -bool -Value::CZString::operator==( const CZString &other ) const -{ - if ( cstr_ ) - return strcmp( cstr_, other.cstr_ ) == 0; - return index_ == other.index_; -} - - -ArrayIndex -Value::CZString::index() const -{ - return index_; -} - - -const char * -Value::CZString::c_str() const -{ - return cstr_; -} - -bool -Value::CZString::isStaticString() const -{ - return index_ == noDuplication; -} - -#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class Value::Value -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -/*! \internal Default constructor initialization must be equivalent to: - * memset( this, 0, sizeof(Value) ) - * This optimization is used in ValueInternalMap fast allocator. - */ -Value::Value( ValueType type ) - : type_( type ) - , allocated_( 0 ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type ) - { - case nullValue: - break; - case intValue: - case uintValue: - value_.int_ = 0; - break; - case realValue: - value_.real_ = 0.0; - break; - case stringValue: - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues(); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArray(); - break; - case objectValue: - value_.map_ = mapAllocator()->newMap(); - break; -#endif - case booleanValue: - value_.bool_ = false; - break; - default: - JSON_ASSERT_UNREACHABLE; - } -} - - -#if defined(JSON_HAS_INT64) -Value::Value( UInt value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( Int value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - -#endif // if defined(JSON_HAS_INT64) - - -Value::Value( Int64 value ) - : type_( intValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.int_ = value; -} - - -Value::Value( UInt64 value ) - : type_( uintValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.uint_ = value; -} - -Value::Value( double value ) - : type_( realValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.real_ = value; -} - -Value::Value( const char *value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = 
duplicateStringValue( value ); -} - - -Value::Value( const char *beginValue, - const char *endValue ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( beginValue, - (unsigned int)(endValue - beginValue) ); -} - - -Value::Value( const std::string &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value.c_str(), - (unsigned int)value.length() ); - -} - -Value::Value( const StaticString &value ) - : type_( stringValue ) - , allocated_( false ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = const_cast( value.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -Value::Value( const CppTL::ConstString &value ) - : type_( stringValue ) - , allocated_( true ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.string_ = duplicateStringValue( value, value.length() ); -} -# endif - -Value::Value( bool value ) - : type_( booleanValue ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - value_.bool_ = value; -} - - -Value::Value( const Value &other ) - : type_( other.type_ ) - , comments_( 0 ) -# ifdef JSON_VALUE_USE_INTERNAL_MAP - , itemIsUsed_( 0 ) -#endif -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - value_ = other.value_; - break; - case stringValue: - if ( other.value_.string_ ) - { - value_.string_ = duplicateStringValue( other.value_.string_ ); - allocated_ = true; - } - else - value_.string_ = 0; - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_ = new ObjectValues( *other.value_.map_ ); - break; -#else - case arrayValue: - value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); - break; - case objectValue: - value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - if ( other.comments_ ) - { - comments_ = new CommentInfo[numberOfCommentPlacement]; - for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) - { - const CommentInfo &otherComment = other.comments_[comment]; - if ( otherComment.comment_ ) - comments_[comment].setComment( otherComment.comment_ ); - } - } -} - - -Value::~Value() -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - break; - case stringValue: - if ( allocated_ ) - releaseStringValue( value_.string_ ); - break; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - delete value_.map_; - break; -#else - case arrayValue: - arrayAllocator()->destructArray( value_.array_ ); - break; - case objectValue: - mapAllocator()->destructMap( value_.map_ ); - break; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - - if ( comments_ ) - delete[] comments_; -} - -Value & -Value::operator=( const Value &other ) -{ - Value temp( other ); - swap( temp ); - return *this; -} - -void -Value::swap( Value &other ) -{ - ValueType temp = type_; - type_ = other.type_; - other.type_ = temp; - std::swap( value_, other.value_ ); - int temp2 = allocated_; - allocated_ = other.allocated_; - other.allocated_ = temp2; -} - -ValueType -Value::type() const -{ - return type_; -} - - -int -Value::compare( const 
Value &other ) const -{ - if ( *this < other ) - return -1; - if ( *this > other ) - return 1; - return 0; -} - - -bool -Value::operator <( const Value &other ) const -{ - int typeDelta = type_ - other.type_; - if ( typeDelta ) - return typeDelta < 0 ? true : false; - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - return value_.int_ < other.value_.int_; - case uintValue: - return value_.uint_ < other.value_.uint_; - case realValue: - return value_.real_ < other.value_.real_; - case booleanValue: - return value_.bool_ < other.value_.bool_; - case stringValue: - return ( value_.string_ == 0 && other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) < 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - { - int delta = int( value_.map_->size() - other.value_.map_->size() ); - if ( delta ) - return delta < 0; - return (*value_.map_) < (*other.value_.map_); - } -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) < 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) < 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable -} - -bool -Value::operator <=( const Value &other ) const -{ - return !(other < *this); -} - -bool -Value::operator >=( const Value &other ) const -{ - return !(*this < other); -} - -bool -Value::operator >( const Value &other ) const -{ - return other < *this; -} - -bool -Value::operator ==( const Value &other ) const -{ - //if ( type_ != other.type_ ) - // GCC 2.95.3 says: - // attempt to take address of bit-field structure member `Json::Value::type_' - // Beats me, but a temp solves the problem. - int temp = other.type_; - if ( type_ != temp ) - return false; - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return value_.int_ == other.value_.int_; - case uintValue: - return value_.uint_ == other.value_.uint_; - case realValue: - return value_.real_ == other.value_.real_; - case booleanValue: - return value_.bool_ == other.value_.bool_; - case stringValue: - return ( value_.string_ == other.value_.string_ ) - || ( other.value_.string_ - && value_.string_ - && strcmp( value_.string_, other.value_.string_ ) == 0 ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - return value_.map_->size() == other.value_.map_->size() - && (*value_.map_) == (*other.value_.map_); -#else - case arrayValue: - return value_.array_->compare( *(other.value_.array_) ) == 0; - case objectValue: - return value_.map_->compare( *(other.value_.map_) ) == 0; -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable -} - -bool -Value::operator !=( const Value &other ) const -{ - return !( *this == other ); -} - -const char * -Value::asCString() const -{ - JSON_ASSERT( type_ == stringValue ); - return value_.string_; -} - - -std::string -Value::asString() const -{ - switch ( type_ ) - { - case nullValue: - return ""; - case stringValue: - return value_.string_ ? value_.string_ : ""; - case booleanValue: - return value_.bool_ ? 
"true" : "false"; - case intValue: - case uintValue: - case realValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to string" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return ""; // unreachable -} - -# ifdef JSON_USE_CPPTL -CppTL::ConstString -Value::asConstString() const -{ - return CppTL::ConstString( asString().c_str() ); -} -# endif - - -Value::Int -Value::asInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); - return Int(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); - return Int(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to int" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt -Value::asUInt() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); - return UInt(value_.int_); - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); - return UInt(value_.uint_); - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -# if defined(JSON_HAS_INT64) - -Value::Int64 -Value::asInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - return value_.int_; - case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); - return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to Int64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -Value::UInt64 -Value::asUInt64() const -{ - switch ( type_ ) - { - case nullValue: - return 0; - case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); - return value_.int_; - case uintValue: - return value_.uint_; - case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); - return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to UInt64" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} -# endif // if defined(JSON_HAS_INT64) - - -LargestInt -Value::asLargestInt() const -{ -#if defined(JSON_NO_INT64) - return asInt(); -#else - return asInt64(); -#endif -} - - -LargestUInt -Value::asLargestUInt() const -{ -#if defined(JSON_NO_INT64) - return asUInt(); -#else - return asUInt64(); -#endif -} - - -double -Value::asDouble() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return value_.real_; - case booleanValue: - return value_.bool_ ? 1.0 : 0.0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to double" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - -float -Value::asFloat() const -{ - switch ( type_ ) - { - case nullValue: - return 0.0f; - case intValue: - return static_cast( value_.int_ ); - case uintValue: -#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( value_.uint_ ); -#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); -#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - case realValue: - return static_cast( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1.0f : 0.0f; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to float" ); - default: - JSON_ASSERT_UNREACHABLE; - } - return 0.0f; // unreachable; -} - -bool -Value::asBool() const -{ - switch ( type_ ) - { - case nullValue: - return false; - case intValue: - case uintValue: - return value_.int_ != 0; - case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; - case stringValue: - return value_.string_ && value_.string_[0] != 0; - case arrayValue: - case objectValue: - return value_.map_->size() != 0; - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -bool -Value::isConvertibleTo( ValueType other ) const -{ - switch ( type_ ) - { - case nullValue: - return true; - case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; - case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; - case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; - case stringValue: - return 
other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); - case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); - case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); - default: - JSON_ASSERT_UNREACHABLE; - } - return false; // unreachable; -} - - -/// Number of values in array or object -ArrayIndex -Value::size() const -{ - switch ( type_ ) - { - case nullValue: - case intValue: - case uintValue: - case realValue: - case booleanValue: - case stringValue: - return 0; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: // size of the array is highest index + 1 - if ( !value_.map_->empty() ) - { - ObjectValues::const_iterator itLast = value_.map_->end(); - --itLast; - return (*itLast).first.index()+1; - } - return 0; - case objectValue: - return ArrayIndex( value_.map_->size() ); -#else - case arrayValue: - return Int( value_.array_->size() ); - case objectValue: - return Int( value_.map_->size() ); -#endif - default: - JSON_ASSERT_UNREACHABLE; - } - return 0; // unreachable; -} - - -bool -Value::empty() const -{ - if ( isNull() || isArray() || isObject() ) - return size() == 0u; - else - return false; -} - - -bool -Value::operator!() const -{ - return isNull(); -} - - -void -Value::clear() -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); - - switch ( type_ ) - { -#ifndef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - case objectValue: - value_.map_->clear(); - break; -#else - case arrayValue: - value_.array_->clear(); - break; - case objectValue: - value_.map_->clear(); - break; -#endif - default: - break; - } -} - -void -Value::resize( ArrayIndex newSize ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ArrayIndex oldSize = size(); - if ( newSize == 0 ) - clear(); - else if ( newSize > oldSize ) - (*this)[ newSize - 1 ]; - else - { - for ( ArrayIndex index = newSize; index < oldSize; ++index ) - { - value_.map_->erase( index ); - } - assert( size() == newSize ); - } -#else - value_.array_->resize( newSize ); -#endif -} - - -Value & -Value::operator[]( ArrayIndex index ) -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - *this = Value( arrayValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::iterator it = value_.map_->lower_bound( key ); - if ( it != value_.map_->end() && (*it).first == key ) - return (*it).second; - - ObjectValues::value_type defaultValue( key, null ); - it = value_.map_->insert( it, defaultValue ); - return (*it).second; -#else - return value_.array_->resolveReference( index ); -#endif -} - - -Value & -Value::operator[]( int index ) -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -const Value & -Value::operator[]( ArrayIndex index ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString key( index ); - ObjectValues::const_iterator it = value_.map_->find( key ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - Value *value = value_.array_->find( index ); - return value ? 
*value : null; -#endif -} - - -const Value & -Value::operator[]( int index ) const -{ - JSON_ASSERT( index >= 0 ); - return (*this)[ ArrayIndex(index) ]; -} - - -Value & -Value::operator[]( const char *key ) -{ - return resolveReference( key, false ); -} - - -Value & -Value::resolveReference( const char *key, - bool isStatic ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - *this = Value( objectValue ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, isStatic ? CZString::noDuplication - : CZString::duplicateOnCopy ); - ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); - if ( it != value_.map_->end() && (*it).first == actualKey ) - return (*it).second; - - ObjectValues::value_type defaultValue( actualKey, null ); - it = value_.map_->insert( it, defaultValue ); - Value &value = (*it).second; - return value; -#else - return value_.map_->resolveReference( key, isStatic ); -#endif -} - - -Value -Value::get( ArrayIndex index, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[index]); - return value == &null ? defaultValue : *value; -} - - -bool -Value::isValidIndex( ArrayIndex index ) const -{ - return index < size(); -} - - - -const Value & -Value::operator[]( const char *key ) const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::const_iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - return (*it).second; -#else - const Value *value = value_.map_->find( key ); - return value ? *value : null; -#endif -} - - -Value & -Value::operator[]( const std::string &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const std::string &key ) const -{ - return (*this)[ key.c_str() ]; -} - -Value & -Value::operator[]( const StaticString &key ) -{ - return resolveReference( key, true ); -} - - -# ifdef JSON_USE_CPPTL -Value & -Value::operator[]( const CppTL::ConstString &key ) -{ - return (*this)[ key.c_str() ]; -} - - -const Value & -Value::operator[]( const CppTL::ConstString &key ) const -{ - return (*this)[ key.c_str() ]; -} -# endif - - -Value & -Value::append( const Value &value ) -{ - return (*this)[size()] = value; -} - - -Value -Value::get( const char *key, - const Value &defaultValue ) const -{ - const Value *value = &((*this)[key]); - return value == &null ? 
defaultValue : *value; -} - - -Value -Value::get( const std::string &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} - -Value -Value::removeMember( const char* key ) -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return null; -#ifndef JSON_VALUE_USE_INTERNAL_MAP - CZString actualKey( key, CZString::noDuplication ); - ObjectValues::iterator it = value_.map_->find( actualKey ); - if ( it == value_.map_->end() ) - return null; - Value old(it->second); - value_.map_->erase(it); - return old; -#else - Value *value = value_.map_->find( key ); - if (value){ - Value old(*value); - value_.map_.remove( key ); - return old; - } else { - return null; - } -#endif -} - -Value -Value::removeMember( const std::string &key ) -{ - return removeMember( key.c_str() ); -} - -# ifdef JSON_USE_CPPTL -Value -Value::get( const CppTL::ConstString &key, - const Value &defaultValue ) const -{ - return get( key.c_str(), defaultValue ); -} -# endif - -bool -Value::isMember( const char *key ) const -{ - const Value *value = &((*this)[key]); - return value != &null; -} - - -bool -Value::isMember( const std::string &key ) const -{ - return isMember( key.c_str() ); -} - - -# ifdef JSON_USE_CPPTL -bool -Value::isMember( const CppTL::ConstString &key ) const -{ - return isMember( key.c_str() ); -} -#endif - -Value::Members -Value::getMemberNames() const -{ - JSON_ASSERT( type_ == nullValue || type_ == objectValue ); - if ( type_ == nullValue ) - return Value::Members(); - Members members; - members.reserve( value_.map_->size() ); -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ObjectValues::const_iterator it = value_.map_->begin(); - ObjectValues::const_iterator itEnd = value_.map_->end(); - for ( ; it != itEnd; ++it ) - members.push_back( std::string( (*it).first.c_str() ) ); -#else - ValueInternalMap::IteratorState it; - ValueInternalMap::IteratorState itEnd; - value_.map_->makeBeginIterator( it ); - value_.map_->makeEndIterator( itEnd ); - for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) - members.push_back( std::string( ValueInternalMap::key( it ) ) ); -#endif - return members; -} -// -//# ifdef JSON_USE_CPPTL -//EnumMemberNames -//Value::enumMemberNames() const -//{ -// if ( type_ == objectValue ) -// { -// return CppTL::Enum::any( CppTL::Enum::transform( -// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), -// MemberNamesTransform() ) ); -// } -// return EnumMemberNames(); -//} -// -// -//EnumValues -//Value::enumValues() const -//{ -// if ( type_ == objectValue || type_ == arrayValue ) -// return CppTL::Enum::anyValues( *(value_.map_), -// CppTL::Type() ); -// return EnumValues(); -//} -// -//# endif - - -bool -Value::isNull() const -{ - return type_ == nullValue; -} - - -bool -Value::isBool() const -{ - return type_ == booleanValue; -} - - -bool -Value::isInt() const -{ - return type_ == intValue; -} - - -bool -Value::isUInt() const -{ - return type_ == uintValue; -} - - -bool -Value::isIntegral() const -{ - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; -} - - -bool -Value::isDouble() const -{ - return type_ == realValue; -} - - -bool -Value::isNumeric() const -{ - return isIntegral() || isDouble(); -} - - -bool -Value::isString() const -{ - return type_ == stringValue; -} - - -bool -Value::isArray() const -{ - return type_ == nullValue || type_ == arrayValue; -} - - -bool -Value::isObject() const -{ - return type_ == nullValue || type_ == objectValue; -} - - -void 
-Value::setComment( const char *comment, - CommentPlacement placement ) -{ - if ( !comments_ ) - comments_ = new CommentInfo[numberOfCommentPlacement]; - comments_[placement].setComment( comment ); -} - - -void -Value::setComment( const std::string &comment, - CommentPlacement placement ) -{ - setComment( comment.c_str(), placement ); -} - - -bool -Value::hasComment( CommentPlacement placement ) const -{ - return comments_ != 0 && comments_[placement].comment_ != 0; -} - -std::string -Value::getComment( CommentPlacement placement ) const -{ - if ( hasComment(placement) ) - return comments_[placement].comment_; - return ""; -} - - -std::string -Value::toStyledString() const -{ - StyledWriter writer; - return writer.write( *this ); -} - - -Value::const_iterator -Value::begin() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - -Value::const_iterator -Value::end() const -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return const_iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return const_iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return const_iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return const_iterator(); -} - - -Value::iterator -Value::begin() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeBeginIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeBeginIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->begin() ); - break; -#endif - default: - break; - } - return iterator(); -} - -Value::iterator -Value::end() -{ - switch ( type_ ) - { -#ifdef JSON_VALUE_USE_INTERNAL_MAP - case arrayValue: - if ( value_.array_ ) - { - ValueInternalArray::IteratorState it; - value_.array_->makeEndIterator( it ); - return iterator( it ); - } - break; - case objectValue: - if ( value_.map_ ) - { - ValueInternalMap::IteratorState it; - value_.map_->makeEndIterator( it ); - return iterator( it ); - } - break; -#else - case arrayValue: - case objectValue: - if ( value_.map_ ) - return iterator( value_.map_->end() ); - break; -#endif - default: - break; - } - return iterator(); -} - - -// class PathArgument -// ////////////////////////////////////////////////////////////////// - -PathArgument::PathArgument() - : kind_( kindNone ) -{ -} - - -PathArgument::PathArgument( ArrayIndex index ) - : index_( index ) - , kind_( kindIndex ) -{ -} - - -PathArgument::PathArgument( const char *key ) - : key_( key ) - , kind_( kindKey ) -{ -} - 
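The PathArgument constructors above and the Path class that follows them form jsoncpp's small path-query helper: a Path is built from a string such as ".member[index]", optionally with "%" placeholders that are filled from up to five extra PathArgument parameters, and resolve() then walks a Value with operator[], while make() creates the intermediate nodes it needs. A minimal usage sketch against that API (the document shape and the member names "window", "width" and "title" are invented for illustration):

   #include <json/json.h>
   #include <iostream>

   int main()
   {
      Json::Value root;
      root["window"][0u]["width"] = 640;   // object -> array -> object, created on the fly

      // ".window[0].width" is split by Path::makePath() into a key, an array
      // index and another key; resolve() follows them down from the root.
      Json::Path path( ".window[0].width" );
      std::cout << path.resolve( root ).asInt() << std::endl;   // prints 640

      // make() resolves the same way but creates missing nodes, so it can be
      // used as a write accessor as well.
      Json::Path( ".window[0].title" ).make( root ) = "main";
      return 0;
   }
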
- -PathArgument::PathArgument( const std::string &key ) - : key_( key.c_str() ) - , kind_( kindKey ) -{ -} - -// class Path -// ////////////////////////////////////////////////////////////////// - -Path::Path( const std::string &path, - const PathArgument &a1, - const PathArgument &a2, - const PathArgument &a3, - const PathArgument &a4, - const PathArgument &a5 ) -{ - InArgs in; - in.push_back( &a1 ); - in.push_back( &a2 ); - in.push_back( &a3 ); - in.push_back( &a4 ); - in.push_back( &a5 ); - makePath( path, in ); -} - - -void -Path::makePath( const std::string &path, - const InArgs &in ) -{ - const char *current = path.c_str(); - const char *end = current + path.length(); - InArgs::const_iterator itInArg = in.begin(); - while ( current != end ) - { - if ( *current == '[' ) - { - ++current; - if ( *current == '%' ) - addPathInArg( path, in, itInArg, PathArgument::kindIndex ); - else - { - ArrayIndex index = 0; - for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) - index = index * 10 + ArrayIndex(*current - '0'); - args_.push_back( index ); - } - if ( current == end || *current++ != ']' ) - invalidPath( path, int(current - path.c_str()) ); - } - else if ( *current == '%' ) - { - addPathInArg( path, in, itInArg, PathArgument::kindKey ); - ++current; - } - else if ( *current == '.' ) - { - ++current; - } - else - { - const char *beginName = current; - while ( current != end && !strchr( "[.", *current ) ) - ++current; - args_.push_back( std::string( beginName, current ) ); - } - } -} - - -void -Path::addPathInArg( const std::string &path, - const InArgs &in, - InArgs::const_iterator &itInArg, - PathArgument::Kind kind ) -{ - if ( itInArg == in.end() ) - { - // Error: missing argument %d - } - else if ( (*itInArg)->kind_ != kind ) - { - // Error: bad argument type - } - else - { - args_.push_back( **itInArg ); - } -} - - -void -Path::invalidPath( const std::string &path, - int location ) -{ - // Error: invalid path. -} - - -const Value & -Path::resolve( const Value &root ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - { - // Error: unable to resolve path (array value expected at position... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: unable to resolve path (object value expected at position...) - } - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - { - // Error: unable to resolve path (object has no member named '' at position...) 
- } - } - } - return *node; -} - - -Value -Path::resolve( const Value &root, - const Value &defaultValue ) const -{ - const Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) - return defaultValue; - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - return defaultValue; - node = &((*node)[arg.key_]); - if ( node == &Value::null ) - return defaultValue; - } - } - return *node; -} - - -Value & -Path::make( Value &root ) const -{ - Value *node = &root; - for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) - { - const PathArgument &arg = *it; - if ( arg.kind_ == PathArgument::kindIndex ) - { - if ( !node->isArray() ) - { - // Error: node is not an array at position ... - } - node = &((*node)[arg.index_]); - } - else if ( arg.kind_ == PathArgument::kindKey ) - { - if ( !node->isObject() ) - { - // Error: node is not an object at position... - } - node = &((*node)[arg.key_]); - } - } - return *node; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl deleted file mode 100644 index 7457ca3..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -// included by json_value.cpp - -namespace Json { - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIteratorBase -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIteratorBase::ValueIteratorBase() -#ifndef JSON_VALUE_USE_INTERNAL_MAP - : current_() - , isNull_( true ) -{ -} -#else - : isArray_( true ) - , isNull_( true ) -{ - iterator_.array_ = ValueInternalArray::IteratorState(); -} -#endif - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) - : current_( current ) - , isNull_( false ) -{ -} -#else -ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) - : isArray_( true ) -{ - iterator_.array_ = state; -} - - -ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) - : isArray_( false ) -{ - iterator_.map_ = state; -} -#endif - -Value & -ValueIteratorBase::deref() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - return current_->second; -#else - if ( isArray_ ) - return ValueInternalArray::dereference( iterator_.array_ ); - return ValueInternalMap::value( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::increment() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - ++current_; -#else - if ( isArray_ ) - ValueInternalArray::increment( iterator_.array_ ); - ValueInternalMap::increment( iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::decrement() -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - --current_; -#else - if ( 
isArray_ ) - ValueInternalArray::decrement( iterator_.array_ ); - ValueInternalMap::decrement( iterator_.map_ ); -#endif -} - - -ValueIteratorBase::difference_type -ValueIteratorBase::computeDistance( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP -# ifdef JSON_USE_CPPTL_SMALLMAP - return current_ - other.current_; -# else - // Iterator for null value are initialized using the default - // constructor, which initialize current_ to the default - // std::map::iterator. As begin() and end() are two instance - // of the default std::map::iterator, they can not be compared. - // To allow this, we handle this comparison specifically. - if ( isNull_ && other.isNull_ ) - { - return 0; - } - - - // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, - // which is the one used by default). - // Using a portable hand-made version for non random iterator instead: - // return difference_type( std::distance( current_, other.current_ ) ); - difference_type myDistance = 0; - for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) - { - ++myDistance; - } - return myDistance; -# endif -#else - if ( isArray_ ) - return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -bool -ValueIteratorBase::isEqual( const SelfType &other ) const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - if ( isNull_ ) - { - return other.isNull_; - } - return current_ == other.current_; -#else - if ( isArray_ ) - return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); - return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); -#endif -} - - -void -ValueIteratorBase::copy( const SelfType &other ) -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - current_ = other.current_; -#else - if ( isArray_ ) - iterator_.array_ = other.iterator_.array_; - iterator_.map_ = other.iterator_.map_; -#endif -} - - -Value -ValueIteratorBase::key() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( czstring.c_str() ) - { - if ( czstring.isStaticString() ) - return Value( StaticString( czstring.c_str() ) ); - return Value( czstring.c_str() ); - } - return Value( czstring.index() ); -#else - if ( isArray_ ) - return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); - bool isStatic; - const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); - if ( isStatic ) - return Value( StaticString( memberName ) ); - return Value( memberName ); -#endif -} - - -UInt -ValueIteratorBase::index() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const Value::CZString czstring = (*current_).first; - if ( !czstring.c_str() ) - return czstring.index(); - return Value::UInt( -1 ); -#else - if ( isArray_ ) - return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); - return Value::UInt( -1 ); -#endif -} - - -const char * -ValueIteratorBase::memberName() const -{ -#ifndef JSON_VALUE_USE_INTERNAL_MAP - const char *name = (*current_).first.c_str(); - return name ? 
name : ""; -#else - if ( !isArray_ ) - return ValueInternalMap::key( iterator_.map_ ); - return ""; -#endif -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueConstIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueConstIterator::ValueConstIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueConstIterator & -ValueConstIterator::operator =( const ValueIteratorBase &other ) -{ - copy( other ); - return *this; -} - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// class ValueIterator -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - -ValueIterator::ValueIterator() -{ -} - - -#ifndef JSON_VALUE_USE_INTERNAL_MAP -ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) - : ValueIteratorBase( current ) -{ -} -#else -ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} - -ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) - : ValueIteratorBase( state ) -{ -} -#endif - -ValueIterator::ValueIterator( const ValueConstIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator::ValueIterator( const ValueIterator &other ) - : ValueIteratorBase( other ) -{ -} - -ValueIterator & -ValueIterator::operator =( const SelfType &other ) -{ - copy( other ); - return *this; -} - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp deleted file mode 100644 index 1bda183..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp +++ /dev/null @@ -1,838 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#if !defined(JSON_IS_AMALGAMATION) -# include -# include "json_tool.h" -#endif // if !defined(JSON_IS_AMALGAMATION) -#include -#include -#include -#include -#include -#include -#include - -#if _MSC_VER >= 1400 // VC++ 8.0 -#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
-#endif - -namespace Json { - -static bool containsControlCharacter( const char* str ) -{ - while ( *str ) - { - if ( isControlCharacter( *(str++) ) ) - return true; - } - return false; -} - - -std::string valueToString( LargestInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - bool isNegative = value < 0; - if ( isNegative ) - value = -value; - uintToString( LargestUInt(value), current ); - if ( isNegative ) - *--current = '-'; - assert( current >= buffer ); - return current; -} - - -std::string valueToString( LargestUInt value ) -{ - UIntToStringBuffer buffer; - char *current = buffer + sizeof(buffer); - uintToString( value, current ); - assert( current >= buffer ); - return current; -} - -#if defined(JSON_HAS_INT64) - -std::string valueToString( Int value ) -{ - return valueToString( LargestInt(value) ); -} - - -std::string valueToString( UInt value ) -{ - return valueToString( LargestUInt(value) ); -} - -#endif // # if defined(JSON_HAS_INT64) - - -std::string valueToString( double value ) -{ - char buffer[32]; -#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. - sprintf_s(buffer, sizeof(buffer), "%#.16g", value); -#else - sprintf(buffer, "%#.16g", value); -#endif - char* ch = buffer + strlen(buffer) - 1; - if (*ch != '0') return buffer; // nothing to truncate, so save time - while(ch > buffer && *ch == '0'){ - --ch; - } - char* last_nonzero = ch; - while(ch >= buffer){ - switch(*ch){ - case '0': - case '1': - case '2': - case '3': - case '4': - case '5': - case '6': - case '7': - case '8': - case '9': - --ch; - continue; - case '.': - // Truncate zeroes to save bytes in output, but keep one. - *(last_nonzero+2) = '\0'; - return buffer; - default: - return buffer; - } - } - return buffer; -} - - -std::string valueToString( bool value ) -{ - return value ? "true" : "false"; -} - -std::string valueToQuotedString( const char *value ) -{ - // Not sure how to handle unicode... - if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) - return std::string("\"") + value + "\""; - // We have to walk value and escape any special characters. - // Appending to std::string is not efficient, but this should be rare. - // (Note: forward slashes are *not* rare, but I am not escaping them.) - std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL - std::string result; - result.reserve(maxsize); // to avoid lots of mallocs - result += "\""; - for (const char* c=value; *c != 0; ++c) - { - switch(*c) - { - case '\"': - result += "\\\""; - break; - case '\\': - result += "\\\\"; - break; - case '\b': - result += "\\b"; - break; - case '\f': - result += "\\f"; - break; - case '\n': - result += "\\n"; - break; - case '\r': - result += "\\r"; - break; - case '\t': - result += "\\t"; - break; - //case '/': - // Even though \/ is considered a legal escape in JSON, a bare - // slash is also legal, so I see no reason to escape it. - // (I hope I am not misunderstanding something. 
- // blep notes: actually escaping \/ may be useful in javascript to avoid (*c); - result += oss.str(); - } - else - { - result += *c; - } - break; - } - } - result += "\""; - return result; -} - -// Class Writer -// ////////////////////////////////////////////////////////////////// -Writer::~Writer() -{ -} - - -// Class FastWriter -// ////////////////////////////////////////////////////////////////// - -FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) -{ -} - - -void -FastWriter::enableYAMLCompatibility() -{ - yamlCompatiblityEnabled_ = true; -} - - -std::string -FastWriter::write( const Value &root ) -{ - document_ = ""; - writeValue( root ); - document_ += "\n"; - return document_; -} - - -void -FastWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - document_ += "null"; - break; - case intValue: - document_ += valueToString( value.asLargestInt() ); - break; - case uintValue: - document_ += valueToString( value.asLargestUInt() ); - break; - case realValue: - document_ += valueToString( value.asDouble() ); - break; - case stringValue: - document_ += valueToQuotedString( value.asCString() ); - break; - case booleanValue: - document_ += valueToString( value.asBool() ); - break; - case arrayValue: - { - document_ += "["; - int size = value.size(); - for ( int index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ","; - writeValue( value[index] ); - } - document_ += "]"; - } - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - document_ += "{"; - for ( Value::Members::iterator it = members.begin(); - it != members.end(); - ++it ) - { - const std::string &name = *it; - if ( it != members.begin() ) - document_ += ","; - document_ += valueToQuotedString( name.c_str() ); - document_ += yamlCompatiblityEnabled_ ? 
": " - : ":"; - writeValue( value[name] ); - } - document_ += "}"; - } - break; - } -} - - -// Class StyledWriter -// ////////////////////////////////////////////////////////////////// - -StyledWriter::StyledWriter() - : rightMargin_( 74 ) - , indentSize_( 3 ) -{ -} - - -std::string -StyledWriter::write( const Value &root ) -{ - document_ = ""; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - document_ += "\n"; - return document_; -} - - -void -StyledWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - for (;;) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - document_ += " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - for (;;) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - document_ += ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - document_ += "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - document_ += ", "; - document_ += childValues_[index]; - } - document_ += " ]"; - } - } -} - - -bool -StyledWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += 
int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - document_ += value; -} - - -void -StyledWriter::writeIndent() -{ - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - document_ += '\n'; - } - document_ += indentString_; -} - - -void -StyledWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - document_ += value; -} - - -void -StyledWriter::indent() -{ - indentString_ += std::string( indentSize_, ' ' ); -} - - -void -StyledWriter::unindent() -{ - assert( int(indentString_.size()) >= indentSize_ ); - indentString_.resize( indentString_.size() - indentSize_ ); -} - - -void -StyledWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - document_ += normalizeEOL( root.getComment( commentBefore ) ); - document_ += "\n"; -} - - -void -StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - document_ += "\n"; - document_ += normalizeEOL( root.getComment( commentAfter ) ); - document_ += "\n"; - } -} - - -bool -StyledWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -// Class StyledStreamWriter -// ////////////////////////////////////////////////////////////////// - -StyledStreamWriter::StyledStreamWriter( std::string indentation ) - : document_(NULL) - , rightMargin_( 74 ) - , indentation_( indentation ) -{ -} - - -void -StyledStreamWriter::write( std::ostream &out, const Value &root ) -{ - document_ = &out; - addChildValues_ = false; - indentString_ = ""; - writeCommentBeforeValue( root ); - writeValue( root ); - writeCommentAfterValueOnSameLine( root ); - *document_ << "\n"; - document_ = NULL; // Forget the stream, for safety. 
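   // Usage sketch (illustrative): because the writer targets a plain
   // std::ostream, a value can be styled straight to std::cout or an
   // std::ofstream, e.g.
   //    Json::Value root;
   //    root["encoding"] = "UTF-8";            // arbitrary example key
   //    Json::StyledStreamWriter writer( "   " );
   //    writer.write( std::cout, root );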
-} - - -void -StyledStreamWriter::writeValue( const Value &value ) -{ - switch ( value.type() ) - { - case nullValue: - pushValue( "null" ); - break; - case intValue: - pushValue( valueToString( value.asLargestInt() ) ); - break; - case uintValue: - pushValue( valueToString( value.asLargestUInt() ) ); - break; - case realValue: - pushValue( valueToString( value.asDouble() ) ); - break; - case stringValue: - pushValue( valueToQuotedString( value.asCString() ) ); - break; - case booleanValue: - pushValue( valueToString( value.asBool() ) ); - break; - case arrayValue: - writeArrayValue( value); - break; - case objectValue: - { - Value::Members members( value.getMemberNames() ); - if ( members.empty() ) - pushValue( "{}" ); - else - { - writeWithIndent( "{" ); - indent(); - Value::Members::iterator it = members.begin(); - for (;;) - { - const std::string &name = *it; - const Value &childValue = value[name]; - writeCommentBeforeValue( childValue ); - writeWithIndent( valueToQuotedString( name.c_str() ) ); - *document_ << " : "; - writeValue( childValue ); - if ( ++it == members.end() ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "}" ); - } - } - break; - } -} - - -void -StyledStreamWriter::writeArrayValue( const Value &value ) -{ - unsigned size = value.size(); - if ( size == 0 ) - pushValue( "[]" ); - else - { - bool isArrayMultiLine = isMultineArray( value ); - if ( isArrayMultiLine ) - { - writeWithIndent( "[" ); - indent(); - bool hasChildValue = !childValues_.empty(); - unsigned index =0; - for (;;) - { - const Value &childValue = value[index]; - writeCommentBeforeValue( childValue ); - if ( hasChildValue ) - writeWithIndent( childValues_[index] ); - else - { - writeIndent(); - writeValue( childValue ); - } - if ( ++index == size ) - { - writeCommentAfterValueOnSameLine( childValue ); - break; - } - *document_ << ","; - writeCommentAfterValueOnSameLine( childValue ); - } - unindent(); - writeWithIndent( "]" ); - } - else // output on a single line - { - assert( childValues_.size() == size ); - *document_ << "[ "; - for ( unsigned index =0; index < size; ++index ) - { - if ( index > 0 ) - *document_ << ", "; - *document_ << childValues_[index]; - } - *document_ << " ]"; - } - } -} - - -bool -StyledStreamWriter::isMultineArray( const Value &value ) -{ - int size = value.size(); - bool isMultiLine = size*3 >= rightMargin_ ; - childValues_.clear(); - for ( int index =0; index < size && !isMultiLine; ++index ) - { - const Value &childValue = value[index]; - isMultiLine = isMultiLine || - ( (childValue.isArray() || childValue.isObject()) && - childValue.size() > 0 ); - } - if ( !isMultiLine ) // check if line length > max line length - { - childValues_.reserve( size ); - addChildValues_ = true; - int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' - for ( int index =0; index < size && !isMultiLine; ++index ) - { - writeValue( value[index] ); - lineLength += int( childValues_[index].length() ); - isMultiLine = isMultiLine && hasCommentForValue( value[index] ); - } - addChildValues_ = false; - isMultiLine = isMultiLine || lineLength >= rightMargin_; - } - return isMultiLine; -} - - -void -StyledStreamWriter::pushValue( const std::string &value ) -{ - if ( addChildValues_ ) - childValues_.push_back( value ); - else - *document_ << value; -} - - -void -StyledStreamWriter::writeIndent() -{ - /* - Some comments in this method would have been nice. 
;-) - - if ( !document_.empty() ) - { - char last = document_[document_.length()-1]; - if ( last == ' ' ) // already indented - return; - if ( last != '\n' ) // Comments may add new-line - *document_ << '\n'; - } - */ - *document_ << '\n' << indentString_; -} - - -void -StyledStreamWriter::writeWithIndent( const std::string &value ) -{ - writeIndent(); - *document_ << value; -} - - -void -StyledStreamWriter::indent() -{ - indentString_ += indentation_; -} - - -void -StyledStreamWriter::unindent() -{ - assert( indentString_.size() >= indentation_.size() ); - indentString_.resize( indentString_.size() - indentation_.size() ); -} - - -void -StyledStreamWriter::writeCommentBeforeValue( const Value &root ) -{ - if ( !root.hasComment( commentBefore ) ) - return; - *document_ << normalizeEOL( root.getComment( commentBefore ) ); - *document_ << "\n"; -} - - -void -StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) -{ - if ( root.hasComment( commentAfterOnSameLine ) ) - *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); - - if ( root.hasComment( commentAfter ) ) - { - *document_ << "\n"; - *document_ << normalizeEOL( root.getComment( commentAfter ) ); - *document_ << "\n"; - } -} - - -bool -StyledStreamWriter::hasCommentForValue( const Value &value ) -{ - return value.hasComment( commentBefore ) - || value.hasComment( commentAfterOnSameLine ) - || value.hasComment( commentAfter ); -} - - -std::string -StyledStreamWriter::normalizeEOL( const std::string &text ) -{ - std::string normalized; - normalized.reserve( text.length() ); - const char *begin = text.c_str(); - const char *end = begin + text.length(); - const char *current = begin; - while ( current != end ) - { - char c = *current++; - if ( c == '\r' ) // mac or dos EOL - { - if ( *current == '\n' ) // convert dos EOL - ++current; - normalized += '\n'; - } - else // handle unix EOL & other char - normalized += c; - } - return normalized; -} - - -std::ostream& operator<<( std::ostream &sout, const Value &root ) -{ - Json::StyledStreamWriter writer; - writer.write(sout, root); - return sout; -} - - -} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript deleted file mode 100644 index 6e7c6c8..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript +++ /dev/null @@ -1,8 +0,0 @@ -Import( 'env buildLibrary' ) - -buildLibrary( env, Split( """ - json_reader.cpp - json_value.cpp - json_writer.cpp - """ ), - 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp deleted file mode 100644 index 02e7b21..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp +++ /dev/null @@ -1,608 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC -#include "jsontest.h" -#include -#include - -#if defined(_MSC_VER) -// Used to install a report hook that prevent dialog on assertion and error. -# include -#endif // if defined(_MSC_VER) - -#if defined(_WIN32) -// Used to prevent dialog on memory fault. 
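// The NOxxx macros below strip optional Windows API subsets from the Windows
// headers before they are included; only SetErrorMode() (used by
// Runner::preventDialogOnCrash() further down) is needed from them here.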
-// Limits headers included by Windows.h -# define WIN32_LEAN_AND_MEAN -# define NOSERVICE -# define NOMCX -# define NOIME -# define NOSOUND -# define NOCOMM -# define NORPC -# define NOGDI -# define NOUSER -# define NODRIVERS -# define NOLOGERROR -# define NOPROFILER -# define NOMEMMGR -# define NOLFILEIO -# define NOOPENFILE -# define NORESOURCE -# define NOATOM -# define NOLANGUAGE -# define NOLSTRING -# define NODBCS -# define NOKEYBOARDINFO -# define NOGDICAPMASKS -# define NOCOLOR -# define NOGDIOBJ -# define NODRAWTEXT -# define NOTEXTMETRIC -# define NOSCALABLEFONT -# define NOBITMAP -# define NORASTEROPS -# define NOMETAFILE -# define NOSYSMETRICS -# define NOSYSTEMPARAMSINFO -# define NOMSG -# define NOWINSTYLES -# define NOWINOFFSETS -# define NOSHOWWINDOW -# define NODEFERWINDOWPOS -# define NOVIRTUALKEYCODES -# define NOKEYSTATES -# define NOWH -# define NOMENUS -# define NOSCROLL -# define NOCLIPBOARD -# define NOICONS -# define NOMB -# define NOSYSCOMMANDS -# define NOMDI -# define NOCTLMGR -# define NOWINMESSAGES -# include -#endif // if defined(_WIN32) - -namespace JsonTest { - - -// class TestResult -// ////////////////////////////////////////////////////////////////// - -TestResult::TestResult() - : predicateId_( 1 ) - , lastUsedPredicateId_( 0 ) - , messageTarget_( 0 ) -{ - // The root predicate has id 0 - rootPredicateNode_.id_ = 0; - rootPredicateNode_.next_ = 0; - predicateStackTail_ = &rootPredicateNode_; -} - - -void -TestResult::setTestName( const std::string &name ) -{ - name_ = name; -} - -TestResult & -TestResult::addFailure( const char *file, unsigned int line, - const char *expr ) -{ - /// Walks the PredicateContext stack adding them to failures_ if not already added. - unsigned int nestingLevel = 0; - PredicateContext *lastNode = rootPredicateNode_.next_; - for ( ; lastNode != 0; lastNode = lastNode->next_ ) - { - if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext - { - lastUsedPredicateId_ = lastNode->id_; - addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, - nestingLevel ); - // Link the PredicateContext to the failure for message target when - // popping the PredicateContext. 
- lastNode->failure_ = &( failures_.back() ); - } - ++nestingLevel; - } - - // Adds the failed assertion - addFailureInfo( file, line, expr, nestingLevel ); - messageTarget_ = &( failures_.back() ); - return *this; -} - - -void -TestResult::addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ) -{ - Failure failure; - failure.file_ = file; - failure.line_ = line; - if ( expr ) - { - failure.expr_ = expr; - } - failure.nestingLevel_ = nestingLevel; - failures_.push_back( failure ); -} - - -TestResult & -TestResult::popPredicateContext() -{ - PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) - { - lastNode = lastNode->next_; - } - // Set message target to popped failure - PredicateContext *tail = lastNode->next_; - if ( tail != 0 && tail->failure_ != 0 ) - { - messageTarget_ = tail->failure_; - } - // Remove tail from list - predicateStackTail_ = lastNode; - lastNode->next_ = 0; - return *this; -} - - -bool -TestResult::failed() const -{ - return !failures_.empty(); -} - - -unsigned int -TestResult::getAssertionNestingLevel() const -{ - unsigned int level = 0; - const PredicateContext *lastNode = &rootPredicateNode_; - while ( lastNode->next_ != 0 ) - { - lastNode = lastNode->next_; - ++level; - } - return level; -} - - -void -TestResult::printFailure( bool printTestName ) const -{ - if ( failures_.empty() ) - { - return; - } - - if ( printTestName ) - { - printf( "* Detail of %s test failure:\n", name_.c_str() ); - } - - // Print in reverse to display the callstack in the right order - Failures::const_iterator itEnd = failures_.end(); - for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) - { - const Failure &failure = *it; - std::string indent( failure.nestingLevel_ * 2, ' ' ); - if ( failure.file_ ) - { - printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); - } - if ( !failure.expr_.empty() ) - { - printf( "%s\n", failure.expr_.c_str() ); - } - else if ( failure.file_ ) - { - printf( "\n" ); - } - if ( !failure.message_.empty() ) - { - std::string reindented = indentText( failure.message_, indent + " " ); - printf( "%s\n", reindented.c_str() ); - } - } -} - - -std::string -TestResult::indentText( const std::string &text, - const std::string &indent ) -{ - std::string reindented; - std::string::size_type lastIndex = 0; - while ( lastIndex < text.size() ) - { - std::string::size_type nextIndex = text.find( '\n', lastIndex ); - if ( nextIndex == std::string::npos ) - { - nextIndex = text.size() - 1; - } - reindented += indent; - reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); - lastIndex = nextIndex + 1; - } - return reindented; -} - - -TestResult & -TestResult::addToLastFailure( const std::string &message ) -{ - if ( messageTarget_ != 0 ) - { - messageTarget_->message_ += message; - } - return *this; -} - - -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - - -// class TestCase -// ////////////////////////////////////////////////////////////////// - -TestCase::TestCase() - : result_( 0 ) -{ -} - - -TestCase::~TestCase() -{ -} - - -void -TestCase::run( TestResult &result ) -{ - result_ = &result; - runTestCase(); -} - - - -// class Runner -// ////////////////////////////////////////////////////////////////// - -Runner::Runner() -{ -} - - -Runner & -Runner::add( TestCaseFactory factory ) -{ - tests_.push_back( factory ); - return *this; -} - - -unsigned int -Runner::testCount() const -{ - return static_cast( tests_.size() ); -} - - -std::string -Runner::testNameAt( unsigned int index ) const -{ - TestCase *test = tests_[index](); - std::string name = test->testName(); - delete test; - return name; -} - - -void -Runner::runTestAt( unsigned int index, TestResult &result ) const -{ - TestCase *test = tests_[index](); - result.setTestName( test->testName() ); - printf( "Testing %s: ", test->testName() ); - fflush( stdout ); -#if JSON_USE_EXCEPTION - try - { -#endif // if JSON_USE_EXCEPTION - test->run( result ); -#if JSON_USE_EXCEPTION - } - catch ( const std::exception &e ) - { - result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); - } -#endif // if JSON_USE_EXCEPTION - delete test; - const char *status = result.failed() ? 
"FAILED" - : "OK"; - printf( "%s\n", status ); - fflush( stdout ); -} - - -bool -Runner::runAllTest( bool printSummary ) const -{ - unsigned int count = testCount(); - std::deque failures; - for ( unsigned int index = 0; index < count; ++index ) - { - TestResult result; - runTestAt( index, result ); - if ( result.failed() ) - { - failures.push_back( result ); - } - } - - if ( failures.empty() ) - { - if ( printSummary ) - { - printf( "All %d tests passed\n", count ); - } - return true; - } - else - { - for ( unsigned int index = 0; index < failures.size(); ++index ) - { - TestResult &result = failures[index]; - result.printFailure( count > 1 ); - } - - if ( printSummary ) - { - unsigned int failedCount = static_cast( failures.size() ); - unsigned int passedCount = count - failedCount; - printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); - } - return false; - } -} - - -bool -Runner::testIndex( const std::string &testName, - unsigned int &indexOut ) const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - if ( testNameAt(index) == testName ) - { - indexOut = index; - return true; - } - } - return false; -} - - -void -Runner::listTests() const -{ - unsigned int count = testCount(); - for ( unsigned int index = 0; index < count; ++index ) - { - printf( "%s\n", testNameAt( index ).c_str() ); - } -} - - -int -Runner::runCommandLine( int argc, const char *argv[] ) const -{ - typedef std::deque TestNames; - Runner subrunner; - for ( int index = 1; index < argc; ++index ) - { - std::string opt = argv[index]; - if ( opt == "--list-tests" ) - { - listTests(); - return 0; - } - else if ( opt == "--test-auto" ) - { - preventDialogOnCrash(); - } - else if ( opt == "--test" ) - { - ++index; - if ( index < argc ) - { - unsigned int testNameIndex; - if ( testIndex( argv[index], testNameIndex ) ) - { - subrunner.add( tests_[testNameIndex] ); - } - else - { - fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - else - { - printUsage( argv[0] ); - return 2; - } - } - bool succeeded; - if ( subrunner.testCount() > 0 ) - { - succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); - } - else - { - succeeded = runAllTest( true ); - } - return succeeded ? 0 - : 1; -} - - -#if defined(_MSC_VER) -// Hook MSVCRT assertions to prevent dialog from appearing -static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) -{ - // The default CRT handling of error and assertion is to display - // an error dialog to the user. - // Instead, when an error or an assertion occurs, we force the - // application to terminate using abort() after display - // the message on stderr. - if ( reportType == _CRT_ERROR || - reportType == _CRT_ASSERT ) - { - // calling abort() cause the ReportHook to be called - // The following is used to detect this case and let's the - // error handler fallback on its default behaviour ( - // display a warning message) - static volatile bool isAborting = false; - if ( isAborting ) - { - return TRUE; - } - isAborting = true; - - fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); - fflush( stderr ); - abort(); - } - // Let's other reportType (_CRT_WARNING) be handled as they would by default - return FALSE; -} -#endif // if defined(_MSC_VER) - - -void -Runner::preventDialogOnCrash() -{ -#if defined(_MSC_VER) - // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. 
- _CrtSetReportHook( &msvcrtSilentReportHook ); -#endif // if defined(_MSC_VER) - - // @todo investiguate this handler (for buffer overflow) - // _set_security_error_handler - -#if defined(_WIN32) - // Prevents the system from popping a dialog for debugging if the - // application fails due to invalid memory access. - SetErrorMode( SEM_FAILCRITICALERRORS - | SEM_NOGPFAULTERRORBOX - | SEM_NOOPENFILEERRORBOX ); -#endif // if defined(_WIN32) -} - -void -Runner::printUsage( const char *appName ) -{ - printf( - "Usage: %s [options]\n" - "\n" - "If --test is not specified, then all the test cases be run.\n" - "\n" - "Valid options:\n" - "--list-tests: print the name of all test cases on the standard\n" - " output and exit.\n" - "--test TESTNAME: executes the test case with the specified name.\n" - " May be repeated.\n" - "--test-auto: prevent dialog prompting for debugging on crash.\n" - , appName ); -} - - - -// Assertion functions -// ////////////////////////////////////////////////////////////////// - -TestResult & -checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ) -{ - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: '" << expected << "'\n"; - result << "Actual : '" << actual << "'"; - } - return result; -} - - -} // namespace JsonTest diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h deleted file mode 100644 index 0d07238..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h +++ /dev/null @@ -1,259 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#ifndef JSONTEST_H_INCLUDED -# define JSONTEST_H_INCLUDED - -# include -# include -# include -# include - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Mini Unit Testing framework -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - - -/** \brief Unit testing framework. - * \warning: all assertions are non-aborting, test case execution will continue - * even if an assertion namespace. - * This constraint is for portability: the framework needs to compile - * on Visual Studio 6 and must not require exception usage. - */ -namespace JsonTest { - - - class Failure - { - public: - const char *file_; - unsigned int line_; - std::string expr_; - std::string message_; - unsigned int nestingLevel_; - }; - - - /// Context used to create the assertion callstack on failure. - /// Must be a POD to allow inline initialisation without stepping - /// into the debugger. - struct PredicateContext - { - typedef unsigned int Id; - Id id_; - const char *file_; - unsigned int line_; - const char *expr_; - PredicateContext *next_; - /// Related Failure, set when the PredicateContext is converted - /// into a Failure. - Failure *failure_; - }; - - class TestResult - { - public: - TestResult(); - - /// \internal Implementation detail for assertion macros - /// Not encapsulated to prevent step into when debugging failed assertions - /// Incremented by one on assertion predicate entry, decreased by one - /// by addPredicateContext(). 
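      /// Used by JSONTEST_ASSERT_PRED to stamp each new PredicateContext with
      /// a unique, increasing id (the root context keeps id 0).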
- PredicateContext::Id predicateId_; - - /// \internal Implementation detail for predicate macros - PredicateContext *predicateStackTail_; - - void setTestName( const std::string &name ); - - /// Adds an assertion failure. - TestResult &addFailure( const char *file, unsigned int line, - const char *expr = 0 ); - - /// Removes the last PredicateContext added to the predicate stack - /// chained list. - /// Next messages will be targed at the PredicateContext that was removed. - TestResult &popPredicateContext(); - - bool failed() const; - - void printFailure( bool printTestName ) const; - - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); - - private: - TestResult &addToLastFailure( const std::string &message ); - unsigned int getAssertionNestingLevel() const; - /// Adds a failure or a predicate context - void addFailureInfo( const char *file, unsigned int line, - const char *expr, unsigned int nestingLevel ); - static std::string indentText( const std::string &text, - const std::string &indent ); - - typedef std::deque Failures; - Failures failures_; - std::string name_; - PredicateContext rootPredicateNode_; - PredicateContext::Id lastUsedPredicateId_; - /// Failure which is the target of the messages added using operator << - Failure *messageTarget_; - }; - - - class TestCase - { - public: - TestCase(); - - virtual ~TestCase(); - - void run( TestResult &result ); - - virtual const char *testName() const = 0; - - protected: - TestResult *result_; - - private: - virtual void runTestCase() = 0; - }; - - /// Function pointer type for TestCase factory - typedef TestCase *(*TestCaseFactory)(); - - class Runner - { - public: - Runner(); - - /// Adds a test to the suite - Runner &add( TestCaseFactory factory ); - - /// Runs test as specified on the command-line - /// If no command-line arguments are provided, run all tests. - /// If --list-tests is provided, then print the list of all test cases - /// If --test is provided, then run test testname. 
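      /// Typical invocations (executable name per the sconscript target):
      ///   test_lib_json --list-tests
      ///   test_lib_json --test ValueTest/size --test-auto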
- int runCommandLine( int argc, const char *argv[] ) const; - - /// Runs all the test cases - bool runAllTest( bool printSummary ) const; - - /// Returns the number of test case in the suite - unsigned int testCount() const; - - /// Returns the name of the test case at the specified index - std::string testNameAt( unsigned int index ) const; - - /// Runs the test case at the specified index using the specified TestResult - void runTestAt( unsigned int index, TestResult &result ) const; - - static void printUsage( const char *appName ); - - private: // prevents copy construction and assignment - Runner( const Runner &other ); - Runner &operator =( const Runner &other ); - - private: - void listTests() const; - bool testIndex( const std::string &testName, unsigned int &index ) const; - static void preventDialogOnCrash(); - - private: - typedef std::deque Factories; - Factories tests_; - }; - - template - TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, - const char *file, unsigned int line, const char *expr ) - { - if ( expected != actual ) - { - result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; - result << "Actual : " << actual; - } - return result; - } - - TestResult & - checkStringEqual( TestResult &result, - const std::string &expected, const std::string &actual, - const char *file, unsigned int line, const char *expr ); - -} // namespace JsonTest - - -/// \brief Asserts that the given expression is true. -/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; -/// JSONTEST_ASSERT( x == y ); -#define JSONTEST_ASSERT( expr ) \ - if ( expr ) \ - { \ - } \ - else \ - result_->addFailure( __FILE__, __LINE__, #expr ) - -/// \brief Asserts that the given predicate is true. -/// The predicate may do other assertions and be a member function of the fixture. -#define JSONTEST_ASSERT_PRED( expr ) \ - { \ - JsonTest::PredicateContext _minitest_Context = { \ - result_->predicateId_, __FILE__, __LINE__, #expr }; \ - result_->predicateStackTail_->next_ = &_minitest_Context; \ - result_->predicateId_ += 1; \ - result_->predicateStackTail_ = &_minitest_Context; \ - (expr); \ - result_->popPredicateContext(); \ - } \ - *result_ - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ - JsonTest::checkEqual( *result_, expected, actual, \ - __FILE__, __LINE__, \ - #expected " == " #actual ) - -/// \brief Asserts that two values are equals. -#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ - JsonTest::checkStringEqual( *result_, \ - std::string(expected), std::string(actual), \ - #expected " == " #actual ) - -/// \brief Begin a fixture test case. 
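/// Example, mirroring src/test_lib_json/main.cpp:
///   JSONTEST_FIXTURE( ValueTest, isObject )
///   {
///      IsCheck checks;
///      checks.isObject_ = true;
///      JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) );
///   }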
-#define JSONTEST_FIXTURE( FixtureType, name ) \ - class Test##FixtureType##name : public FixtureType \ - { \ - public: \ - static JsonTest::TestCase *factory() \ - { \ - return new Test##FixtureType##name(); \ - } \ - public: /* overidden from TestCase */ \ - virtual const char *testName() const \ - { \ - return #FixtureType "/" #name; \ - } \ - virtual void runTestCase(); \ - }; \ - \ - void Test##FixtureType##name::runTestCase() - -#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ - &Test##FixtureType##name::factory - -#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ - (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) - -#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp deleted file mode 100644 index 3275219..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp +++ /dev/null @@ -1,430 +0,0 @@ -// Copyright 2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -#include -#include "jsontest.h" - - -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. - - -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// -// Json Library test cases -// ////////////////////////////////////////////////////////////////// -// ////////////////////////////////////////////////////////////////// - - -struct ValueTest : JsonTest::TestCase -{ - Json::Value null_; - Json::Value emptyArray_; - Json::Value emptyObject_; - Json::Value integer_; - Json::Value unsignedInteger_; - Json::Value smallUnsignedInteger_; - Json::Value real_; - Json::Value float_; - Json::Value array1_; - Json::Value object1_; - Json::Value emptyString_; - Json::Value string1_; - Json::Value string_; - Json::Value true_; - Json::Value false_; - - - ValueTest() - : emptyArray_( Json::arrayValue ) - , emptyObject_( Json::objectValue ) - , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) - , unsignedInteger_( 34567890u ) - , real_( 1234.56789 ) - , float_( 0.00390625f ) - , emptyString_( "" ) - , string1_( "a" ) - , string_( "sometext with space" ) - , true_( true ) - , false_( false ) - { - array1_.append( 1234 ); - object1_["id"] = 1234; - } - - struct IsCheck - { - /// Initialize all checks to \c false by default. 
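      /// A fixture sets only the flags it expects to be true; checkIs() then
      /// asserts all ten is*() predicates of a value against these flags.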
- IsCheck(); - - bool isObject_; - bool isArray_; - bool isBool_; - bool isDouble_; - bool isInt_; - bool isUInt_; - bool isIntegral_; - bool isNumeric_; - bool isString_; - bool isNull_; - }; - - void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); - - void checkMemberCount( Json::Value &value, unsigned int expectedCount ); - - void checkIs( const Json::Value &value, const IsCheck &check ); - - void checkIsLess( const Json::Value &x, const Json::Value &y ); - - void checkIsEqual( const Json::Value &x, const Json::Value &y ); -}; - - -JSONTEST_FIXTURE( ValueTest, size ) -{ - JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); - JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); - JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); -} - - -JSONTEST_FIXTURE( ValueTest, isObject ) -{ - IsCheck checks; - checks.isObject_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isArray ) -{ - IsCheck checks; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isNull ) -{ - IsCheck checks; - checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; - JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isString ) -{ - IsCheck checks; - checks.isString_ = true; - JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isBool ) -{ - IsCheck checks; - checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isDouble ) -{ - IsCheck checks; - checks.isDouble_ = true; - checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isInt ) -{ - IsCheck checks; - checks.isInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, isUInt ) -{ - IsCheck checks; - checks.isUInt_ = true; - checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); -} - - -JSONTEST_FIXTURE( ValueTest, accessArray ) -{ - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; - - const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; -} - - -JSONTEST_FIXTURE( ValueTest, asFloat ) -{ - 
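   // 0.00390625 is 2^-8 and therefore exactly representable as a float, so the
   // strict equality comparison below is safe.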
JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; -} - -void -ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) -{ - unsigned int count = 0; - Json::Value::const_iterator itEnd = value.end(); - for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; -} - -void -ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) -{ - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); - - unsigned int count = 0; - Json::Value::iterator itEnd = value.end(); - for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) - { - ++count; - } - JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; - - JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); -} - - -ValueTest::IsCheck::IsCheck() - : isObject_( false ) - , isArray_( false ) - , isBool_( false ) - , isDouble_( false ) - , isInt_( false ) - , isUInt_( false ) - , isIntegral_( false ) - , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) -{ -} - - -void -ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) -{ - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); -} - - -JSONTEST_FIXTURE( ValueTest, compareNull ) -{ - JSONTEST_ASSERT_PRED( checkIsEqual( Json::Value(), Json::Value() ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareInt ) -{ - JSONTEST_ASSERT_PRED( checkIsLess( 0, 10 ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( 10, 10 ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( -10, -10 ) ); - JSONTEST_ASSERT_PRED( checkIsLess( -10, 0 ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareUInt ) -{ - JSONTEST_ASSERT_PRED( checkIsLess( 0u, 10u ) ); - JSONTEST_ASSERT_PRED( checkIsLess( 0u, Json::Value::maxUInt ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( 10u, 10u ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareDouble ) -{ - JSONTEST_ASSERT_PRED( checkIsLess( 0.0, 10.0 ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( 10.0, 10.0 ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( -10.0, -10.0 ) ); - JSONTEST_ASSERT_PRED( checkIsLess( -10.0, 0.0 ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareString ) -{ - JSONTEST_ASSERT_PRED( checkIsLess( "", " " ) ); - JSONTEST_ASSERT_PRED( checkIsLess( "", "a" ) ); - JSONTEST_ASSERT_PRED( checkIsLess( "abcd", "zyui" ) ); - JSONTEST_ASSERT_PRED( checkIsLess( "abc", "abcd" ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( "abcd", "abcd" ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( " ", " " ) ); - JSONTEST_ASSERT_PRED( checkIsLess( "ABCD", "abcd" ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( "ABCD", "ABCD" ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareBoolean ) -{ - JSONTEST_ASSERT_PRED( checkIsLess( false, true ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( false, false ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( true, true ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareArray ) -{ - // array compare size then content - Json::Value 
emptyArray(Json::arrayValue); - Json::Value l1aArray; - l1aArray.append( 0 ); - Json::Value l1bArray; - l1bArray.append( 10 ); - Json::Value l2aArray; - l2aArray.append( 0 ); - l2aArray.append( 0 ); - Json::Value l2bArray; - l2bArray.append( 0 ); - l2bArray.append( 10 ); - JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l1aArray ) ); - JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l2aArray ) ); - JSONTEST_ASSERT_PRED( checkIsLess( l1aArray, l2aArray ) ); - JSONTEST_ASSERT_PRED( checkIsLess( l2aArray, l2bArray ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( emptyArray, Json::Value( emptyArray ) ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( l1aArray, Json::Value( l1aArray) ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( l2bArray, Json::Value( l2bArray) ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareObject ) -{ - // object compare size then content - Json::Value emptyObject(Json::objectValue); - Json::Value l1aObject; - l1aObject["key1"] = 0; - Json::Value l1bObject; - l1aObject["key1"] = 10; - Json::Value l2aObject; - l2aObject["key1"] = 0; - l2aObject["key2"] = 0; - JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l1aObject ) ); - JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l2aObject ) ); - JSONTEST_ASSERT_PRED( checkIsLess( l1aObject, l2aObject ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( emptyObject, Json::Value( emptyObject ) ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( l1aObject, Json::Value( l1aObject ) ) ); - JSONTEST_ASSERT_PRED( checkIsEqual( l2aObject, Json::Value( l2aObject ) ) ); -} - - -JSONTEST_FIXTURE( ValueTest, compareType ) -{ - // object of different type are ordered according to their type - JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(), Json::Value(1) ) ); - JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1), Json::Value(1u) ) ); - JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1u), Json::Value(1.0) ) ); - JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1.0), Json::Value("a") ) ); - JSONTEST_ASSERT_PRED( checkIsLess( Json::Value("a"), Json::Value(true) ) ); - JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(true), Json::Value(Json::arrayValue) ) ); - JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(Json::arrayValue), Json::Value(Json::objectValue) ) ); -} - - -void -ValueTest::checkIsLess( const Json::Value &x, const Json::Value &y ) -{ - JSONTEST_ASSERT( x < y ); - JSONTEST_ASSERT( y > x ); - JSONTEST_ASSERT( x <= y ); - JSONTEST_ASSERT( y >= x ); - JSONTEST_ASSERT( !(x == y) ); - JSONTEST_ASSERT( !(y == x) ); - JSONTEST_ASSERT( !(x >= y) ); - JSONTEST_ASSERT( !(y <= x) ); - JSONTEST_ASSERT( !(x > y) ); - JSONTEST_ASSERT( !(y < x) ); - JSONTEST_ASSERT( x.compare( y ) < 0 ); - JSONTEST_ASSERT( y.compare( x ) >= 0 ); -} - - -void -ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) -{ - JSONTEST_ASSERT( x == y ); - JSONTEST_ASSERT( y == x ); - JSONTEST_ASSERT( x <= y ); - JSONTEST_ASSERT( y <= x ); - JSONTEST_ASSERT( x >= y ); - JSONTEST_ASSERT( y >= x ); - JSONTEST_ASSERT( !(x < y) ); - JSONTEST_ASSERT( !(y < x) ); - JSONTEST_ASSERT( !(x > y) ); - JSONTEST_ASSERT( !(y > x) ); - JSONTEST_ASSERT( x.compare( y ) == 0 ); - JSONTEST_ASSERT( y.compare( x ) == 0 ); -} - - -int main( int argc, const char *argv[] ) -{ - JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, 
isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareBoolean ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareType ); - return runner.runCommandLine( argc, argv ); -} diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript deleted file mode 100644 index 915fd01..0000000 --- a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript +++ /dev/null @@ -1,10 +0,0 @@ -Import( 'env_testing buildUnitTests' ) - -buildUnitTests( env_testing, Split( """ - main.cpp - jsontest.cpp - """ ), - 'test_lib_json' ) - -# For 'check' to work, 'libs' must be built first. -env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc2/test/cleantests.py b/tags/jsoncpp/0.6.0-rc2/test/cleantests.py deleted file mode 100644 index c38fd8f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/cleantests.py +++ /dev/null @@ -1,10 +0,0 @@ -# removes all files created during testing -import glob -import os - -paths = [] -for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: - paths += glob.glob( 'data/' + pattern ) - -for path in paths: - os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json deleted file mode 100644 index 900fcc2..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected deleted file mode 100644 index a341ff7..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=[] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json deleted file mode 100644 index fe51488..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json +++ /dev/null @@ -1 +0,0 @@ -[] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected deleted file mode 100644 index ef1f262..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=[] -.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json deleted file mode 100644 index 7660873..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json +++ /dev/null @@ -1 +0,0 @@ -[1] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected deleted file mode 100644 index 3d8dc18..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected +++ 
/dev/null @@ -1,6 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json deleted file mode 100644 index 9b3f924..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json +++ /dev/null @@ -1 +0,0 @@ -[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected deleted file mode 100644 index ad4add9..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]=1 -.[1]="abc" -.[2]=12.3 -.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json deleted file mode 100644 index ecca546..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json +++ /dev/null @@ -1 +0,0 @@ -[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected deleted file mode 100644 index 76cff87..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected +++ /dev/null @@ -1,100 +0,0 @@ -.=[] -.[0]=1 -.[1]=2 -.[2]=3 -.[3]=4 -.[4]=5 -.[5]=6 -.[6]=7 -.[7]=8 -.[8]=9 -.[9]=10 -.[10]=11 -.[11]=12 -.[12]=13 -.[13]=14 -.[14]=15 -.[15]=16 -.[16]=17 -.[17]=18 -.[18]=19 -.[19]=20 -.[20]=21 -.[21]=22 -.[22]=23 -.[23]=24 -.[24]=25 -.[25]=26 -.[26]=27 -.[27]=28 -.[28]=29 -.[29]=30 -.[30]=31 -.[31]=32 -.[32]=33 -.[33]=34 -.[34]=35 -.[35]=36 -.[36]=37 -.[37]=38 -.[38]=39 -.[39]=40 -.[40]=41 -.[41]=42 -.[42]=43 -.[43]=44 -.[44]=45 -.[45]=46 -.[46]=47 -.[47]=48 -.[48]=49 -.[49]=50 -.[50]=51 -.[51]=52 -.[52]=53 -.[53]=54 -.[54]=55 -.[55]=56 -.[56]=57 -.[57]=58 -.[58]=59 -.[59]=60 -.[60]=61 -.[61]=62 -.[62]=63 -.[63]=64 -.[64]=65 -.[65]=66 -.[66]=67 -.[67]=68 -.[68]=69 -.[69]=70 -.[70]=71 -.[71]=72 -.[72]=73 -.[73]=74 -.[74]=75 -.[75]=76 -.[76]=77 -.[77]=78 -.[78]=79 -.[79]=80 -.[80]=81 -.[81]=82 -.[82]=83 -.[83]=84 -.[84]=85 -.[85]=86 -.[86]=87 -.[87]=88 -.[88]=89 -.[89]=90 -.[90]=91 -.[91]=92 -.[92]=93 -.[93]=94 -.[94]=95 -.[95]=96 -.[96]=97 -.[97]=98 -.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json deleted file mode 100644 index 7809d6c..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json +++ /dev/null @@ -1 +0,0 @@ -[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected deleted file mode 100644 index 5c9f48e..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected +++ /dev/null @@ -1,5 +0,0 @@ -.=[] -.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" -.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" -.[2]="ccccccccccccccccccccccc" -.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json deleted file mode 100644 index 7f6c516..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json +++ /dev/null @@ -1,4 
+0,0 @@ -[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", - "ccccccccccccccccccccccc", - "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected deleted file mode 100644 index d761fce..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json deleted file mode 100644 index 11f11f9..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json +++ /dev/null @@ -1 +0,0 @@ -0123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected deleted file mode 100644 index 650e37c..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json deleted file mode 100644 index bf11bce..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json +++ /dev/null @@ -1 +0,0 @@ --0123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected deleted file mode 100644 index 1da2d39..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json deleted file mode 100644 index a92b6bd..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json +++ /dev/null @@ -1,3 +0,0 @@ -1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected deleted file mode 100644 index 013f424..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="abcdef" - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json deleted file mode 100644 index 17eeb99..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json +++ /dev/null @@ -1,2 +0,0 @@ -"abcdef" - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json deleted file mode 100644 index d0aaea2..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json +++ /dev/null @@ -1,2 +0,0 @@ -null - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected deleted file mode 100644 index 49be55a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=true - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json deleted file mode 100644 index 7eead1e..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json +++ /dev/null @@ -1,2 +0,0 @@ -true - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected 
b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected deleted file mode 100644 index fe55a6a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=false - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json deleted file mode 100644 index a864bc4..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json +++ /dev/null @@ -1,2 +0,0 @@ -false - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json deleted file mode 100644 index fd78837..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json +++ /dev/null @@ -1,3 +0,0 @@ -// C++ style comment -null - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected deleted file mode 100644 index c8db822..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=null - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json deleted file mode 100644 index fc95f0f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json +++ /dev/null @@ -1,4 +0,0 @@ -/* C style comment - */ -null - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected deleted file mode 100644 index 0b8f42d..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected +++ /dev/null @@ -1,8 +0,0 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json deleted file mode 100644 index 0de8f9c..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected deleted file mode 100644 index 7573c88..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected +++ /dev/null @@ -1,20 +0,0 @@ -.={} -.attribute=[] -.attribute[0]="random" -.attribute[1]="short" -.attribute[2]="bold" -.attribute[3]=12 -.attribute[4]={} -.attribute[4].height=7 -.attribute[4].width=64 -.count=1234 -.name={} -.name.aka="T.E.S.T." 
-.name.id=123987 -.test={} -.test.1={} -.test.1.2={} -.test.1.2.3={} -.test.1.2.3.coord=[] -.test.1.2.3.coord[0]=1 -.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json deleted file mode 100644 index cc0f30f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "count" : 1234, - "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, - "attribute" : [ - "random", - "short", - "bold", - 12, - { "height" : 7, "width" : 64 } - ], - "test": { "1" : - { "2" : - { "3" : { "coord" : [ 1,2] } - } - } - } -} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected deleted file mode 100644 index 593f1db..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected +++ /dev/null @@ -1 +0,0 @@ -.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json deleted file mode 100644 index 5ab12ff..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max signed integer -2147483647 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected deleted file mode 100644 index 4b83bd7..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected +++ /dev/null @@ -1 +0,0 @@ -.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json deleted file mode 100644 index 056c850..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json +++ /dev/null @@ -1,2 +0,0 @@ -// Min signed integer --2147483648 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected deleted file mode 100644 index 37c1cb1..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected +++ /dev/null @@ -1 +0,0 @@ -.=4294967295 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json deleted file mode 100644 index 12ef3fb..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json +++ /dev/null @@ -1,2 +0,0 @@ -// Max unsigned integer -4294967295 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected deleted file mode 100644 index b7b548e..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=0 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json deleted file mode 100644 index bf81499..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// Min unsigned integer -0 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected deleted file mode 100644 index 0caea9d..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json deleted file mode 100644 index d474e1b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json +++ /dev/null @@ -1,2 +0,0 @@ -1 - diff --git 
a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected deleted file mode 100644 index bc9520a1..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json deleted file mode 100644 index 360d660..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -9223372036854775808 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected deleted file mode 100644 index 39eb798..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json deleted file mode 100644 index 11d8513..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ --9223372036854775808 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected deleted file mode 100644 index 831f432..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected +++ /dev/null @@ -1 +0,0 @@ -.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json deleted file mode 100644 index 6e1fb04..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json +++ /dev/null @@ -1,2 +0,0 @@ -18446744073709551615 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected deleted file mode 100644 index ee2fafc..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected +++ /dev/null @@ -1,2122 +0,0 @@ -.=[] -.[0]=[] -.[0][0]="A" -.[0][1]=0 -.[0][2]=1 -.[0][3]=2 -.[0][4]=3 -.[0][5]=4 -.[0][6]=5 -.[0][7]=6 -.[0][8]=7 -.[0][9]=8 -.[0][10]=9 -.[0][11]=10 -.[0][12]=11 -.[0][13]=12 -.[0][14]=13 -.[0][15]=14 -.[0][16]=15 -.[0][17]=16 -.[0][18]=17 -.[0][19]=18 -.[0][20]=19 -.[0][21]=20 -.[0][22]=21 -.[0][23]=22 -.[0][24]=23 -.[0][25]=24 -.[0][26]=25 -.[0][27]=26 -.[0][28]=27 -.[0][29]=28 -.[0][30]=29 -.[0][31]=30 -.[0][32]=31 -.[0][33]=32 -.[0][34]=33 -.[0][35]=34 -.[0][36]=35 -.[0][37]=36 -.[0][38]=37 -.[0][39]=38 -.[0][40]=39 -.[0][41]=40 -.[0][42]=41 -.[0][43]=42 -.[0][44]=43 -.[0][45]=44 -.[0][46]=45 -.[0][47]=46 -.[0][48]=47 -.[0][49]=48 -.[0][50]=49 -.[0][51]=50 -.[0][52]=51 -.[0][53]=52 -.[0][54]=53 -.[0][55]=54 -.[0][56]=55 -.[0][57]=56 -.[0][58]=57 -.[0][59]=58 -.[0][60]=59 -.[0][61]=60 -.[0][62]=61 -.[0][63]=62 -.[0][64]=63 -.[0][65]=64 -.[0][66]=65 -.[0][67]=66 -.[0][68]=67 -.[0][69]=68 -.[0][70]=69 -.[0][71]=70 -.[0][72]=71 -.[0][73]=72 -.[0][74]=73 -.[0][75]=74 -.[0][76]=75 -.[0][77]=76 -.[0][78]=77 -.[0][79]=78 -.[0][80]=79 -.[0][81]=80 -.[0][82]=81 -.[0][83]=82 -.[0][84]=83 -.[0][85]=84 -.[0][86]=85 -.[0][87]=86 -.[0][88]=87 -.[0][89]=88 -.[0][90]=89 -.[0][91]=90 -.[0][92]=91 -.[0][93]=92 -.[0][94]=93 -.[0][95]=94 -.[0][96]=95 -.[0][97]=96 -.[0][98]=97 -.[0][99]=98 -.[0][100]=99 -.[0][101]=100 -.[0][102]=101 -.[0][103]=102 -.[0][104]=103 -.[0][105]=104 -.[0][106]=105 -.[0][107]=106 -.[0][108]=107 
-.[0][109]=108 -.[0][110]=109 -.[0][111]=110 -.[0][112]=111 -.[0][113]=112 -.[0][114]=113 -.[0][115]=114 -.[0][116]=115 -.[0][117]=116 -.[0][118]=117 -.[0][119]=118 -.[0][120]=119 -.[0][121]=120 -.[0][122]=121 -.[0][123]=122 -.[0][124]=123 -.[0][125]=124 -.[0][126]=125 -.[0][127]=126 -.[0][128]=127 -.[0][129]=128 -.[0][130]=129 -.[0][131]=130 -.[0][132]=131 -.[0][133]=132 -.[0][134]=133 -.[0][135]=134 -.[0][136]=135 -.[0][137]=136 -.[0][138]=137 -.[0][139]=138 -.[0][140]=139 -.[0][141]=140 -.[0][142]=141 -.[0][143]=142 -.[0][144]=143 -.[0][145]=144 -.[0][146]=145 -.[0][147]=146 -.[0][148]=147 -.[0][149]=148 -.[0][150]=149 -.[0][151]=150 -.[0][152]=151 -.[0][153]=152 -.[0][154]=153 -.[0][155]=154 -.[0][156]=155 -.[0][157]=156 -.[0][158]=157 -.[0][159]=158 -.[0][160]=159 -.[0][161]=160 -.[0][162]=161 -.[0][163]=162 -.[0][164]=163 -.[0][165]=164 -.[0][166]=165 -.[0][167]=166 -.[0][168]=167 -.[0][169]=168 -.[0][170]=169 -.[0][171]=170 -.[0][172]=171 -.[0][173]=172 -.[0][174]=173 -.[0][175]=174 -.[0][176]=175 -.[0][177]=176 -.[0][178]=177 -.[0][179]=178 -.[0][180]=179 -.[0][181]=180 -.[0][182]=181 -.[0][183]=182 -.[0][184]=183 -.[0][185]=184 -.[0][186]=185 -.[0][187]=186 -.[0][188]=187 -.[0][189]=188 -.[0][190]=189 -.[0][191]=190 -.[0][192]=191 -.[0][193]=192 -.[0][194]=193 -.[0][195]=194 -.[0][196]=195 -.[0][197]=196 -.[0][198]=197 -.[0][199]=198 -.[0][200]=199 -.[0][201]=200 -.[0][202]=201 -.[0][203]=202 -.[0][204]=203 -.[0][205]=204 -.[0][206]=205 -.[0][207]=206 -.[0][208]=207 -.[0][209]=208 -.[0][210]=209 -.[0][211]=210 -.[0][212]=211 -.[0][213]=212 -.[0][214]=213 -.[0][215]=214 -.[0][216]=215 -.[0][217]=216 -.[0][218]=217 -.[0][219]=218 -.[0][220]=219 -.[0][221]=220 -.[0][222]=221 -.[0][223]=222 -.[0][224]=223 -.[0][225]=224 -.[0][226]=225 -.[0][227]=226 -.[0][228]=227 -.[0][229]=228 -.[0][230]=229 -.[0][231]=230 -.[0][232]=231 -.[0][233]=232 -.[0][234]=233 -.[0][235]=234 -.[0][236]=235 -.[0][237]=236 -.[0][238]=237 -.[0][239]=238 -.[0][240]=239 -.[0][241]=240 -.[0][242]=241 -.[0][243]=242 -.[0][244]=243 -.[0][245]=244 -.[0][246]=245 -.[0][247]=246 -.[0][248]=247 -.[0][249]=248 -.[0][250]=249 -.[0][251]=250 -.[0][252]=251 -.[0][253]=252 -.[0][254]=253 -.[0][255]=254 -.[0][256]=255 -.[0][257]=256 -.[0][258]=257 -.[0][259]=258 -.[0][260]=259 -.[0][261]=260 -.[0][262]=261 -.[0][263]=262 -.[0][264]=263 -.[0][265]=264 -.[0][266]=265 -.[0][267]=266 -.[0][268]=267 -.[0][269]=268 -.[0][270]=269 -.[0][271]=270 -.[0][272]=271 -.[0][273]=272 -.[0][274]=273 -.[0][275]=274 -.[0][276]=275 -.[0][277]=276 -.[0][278]=277 -.[0][279]=278 -.[0][280]=279 -.[0][281]=280 -.[0][282]=281 -.[0][283]=282 -.[0][284]=283 -.[0][285]=284 -.[0][286]=285 -.[0][287]=286 -.[0][288]=287 -.[0][289]=288 -.[0][290]=289 -.[0][291]=290 -.[0][292]=291 -.[0][293]=292 -.[0][294]=293 -.[0][295]=294 -.[0][296]=295 -.[0][297]=296 -.[0][298]=297 -.[0][299]=298 -.[0][300]=299 -.[0][301]=300 -.[0][302]=301 -.[0][303]=302 -.[0][304]=303 -.[0][305]=304 -.[0][306]=305 -.[0][307]=306 -.[0][308]=307 -.[0][309]=308 -.[0][310]=309 -.[0][311]=310 -.[0][312]=311 -.[0][313]=312 -.[0][314]=313 -.[0][315]=314 -.[0][316]=315 -.[0][317]=316 -.[0][318]=317 -.[0][319]=318 -.[0][320]=319 -.[0][321]=320 -.[0][322]=321 -.[0][323]=322 -.[0][324]=323 -.[0][325]=324 -.[0][326]=325 -.[0][327]=326 -.[0][328]=327 -.[0][329]=328 -.[0][330]=329 -.[0][331]=330 -.[0][332]=331 -.[0][333]=332 -.[0][334]=333 -.[0][335]=334 -.[0][336]=335 -.[0][337]=336 -.[0][338]=337 -.[0][339]=338 -.[0][340]=339 -.[0][341]=340 -.[0][342]=341 -.[0][343]=342 -.[0][344]=343 -.[0][345]=344 
-.[0][346]=345 -.[0][347]=346 -.[0][348]=347 -.[0][349]=348 -.[0][350]=349 -.[0][351]=350 -.[0][352]=351 -.[0][353]=352 -.[0][354]=353 -.[0][355]=354 -.[0][356]=355 -.[0][357]=356 -.[0][358]=357 -.[0][359]=358 -.[0][360]=359 -.[0][361]=360 -.[0][362]=361 -.[0][363]=362 -.[0][364]=363 -.[0][365]=364 -.[0][366]=365 -.[0][367]=366 -.[0][368]=367 -.[0][369]=368 -.[0][370]=369 -.[0][371]=370 -.[0][372]=371 -.[0][373]=372 -.[0][374]=373 -.[0][375]=374 -.[0][376]=375 -.[0][377]=376 -.[0][378]=377 -.[0][379]=378 -.[0][380]=379 -.[0][381]=380 -.[0][382]=381 -.[0][383]=382 -.[0][384]=383 -.[0][385]=384 -.[0][386]=385 -.[0][387]=386 -.[0][388]=387 -.[0][389]=388 -.[0][390]=389 -.[0][391]=390 -.[0][392]=391 -.[0][393]=392 -.[0][394]=393 -.[0][395]=394 -.[0][396]=395 -.[0][397]=396 -.[0][398]=397 -.[0][399]=398 -.[0][400]=399 -.[0][401]=400 -.[0][402]=401 -.[0][403]=402 -.[0][404]=403 -.[0][405]=404 -.[0][406]=405 -.[0][407]=406 -.[0][408]=407 -.[0][409]=408 -.[0][410]=409 -.[0][411]=410 -.[0][412]=411 -.[0][413]=412 -.[0][414]=413 -.[0][415]=414 -.[0][416]=415 -.[0][417]=416 -.[0][418]=417 -.[0][419]=418 -.[0][420]=419 -.[0][421]=420 -.[0][422]=421 -.[0][423]=422 -.[0][424]=423 -.[0][425]=424 -.[0][426]=425 -.[0][427]=426 -.[0][428]=427 -.[0][429]=428 -.[0][430]=429 -.[0][431]=430 -.[0][432]=431 -.[0][433]=432 -.[0][434]=433 -.[0][435]=434 -.[0][436]=435 -.[0][437]=436 -.[0][438]=437 -.[0][439]=438 -.[0][440]=439 -.[0][441]=440 -.[0][442]=441 -.[0][443]=442 -.[0][444]=443 -.[0][445]=444 -.[0][446]=445 -.[0][447]=446 -.[0][448]=447 -.[0][449]=448 -.[0][450]=449 -.[0][451]=450 -.[0][452]=451 -.[0][453]=452 -.[0][454]=453 -.[0][455]=454 -.[0][456]=455 -.[0][457]=456 -.[0][458]=457 -.[0][459]=458 -.[0][460]=459 -.[0][461]=460 -.[0][462]=461 -.[0][463]=462 -.[0][464]=463 -.[0][465]=464 -.[0][466]=465 -.[0][467]=466 -.[0][468]=467 -.[0][469]=468 -.[0][470]=469 -.[0][471]=470 -.[0][472]=471 -.[0][473]=472 -.[0][474]=473 -.[0][475]=474 -.[0][476]=475 -.[0][477]=476 -.[0][478]=477 -.[0][479]=478 -.[0][480]=479 -.[0][481]=480 -.[0][482]=481 -.[0][483]=482 -.[0][484]=483 -.[0][485]=484 -.[0][486]=485 -.[0][487]=486 -.[0][488]=487 -.[0][489]=488 -.[0][490]=489 -.[0][491]=490 -.[0][492]=491 -.[0][493]=492 -.[0][494]=493 -.[0][495]=494 -.[0][496]=495 -.[0][497]=496 -.[0][498]=497 -.[0][499]=498 -.[0][500]=499 -.[0][501]=500 -.[0][502]=501 -.[0][503]=502 -.[0][504]=503 -.[0][505]=504 -.[0][506]=505 -.[0][507]=506 -.[0][508]=507 -.[0][509]=508 -.[0][510]=509 -.[0][511]=510 -.[0][512]=511 -.[0][513]=512 -.[0][514]=513 -.[0][515]=514 -.[0][516]=515 -.[0][517]=516 -.[0][518]=517 -.[0][519]=518 -.[0][520]=519 -.[0][521]=520 -.[0][522]=521 -.[0][523]=522 -.[0][524]=523 -.[0][525]=524 -.[0][526]=525 -.[0][527]=526 -.[0][528]=527 -.[0][529]=528 -.[0][530]=529 -.[0][531]=530 -.[0][532]=531 -.[0][533]=532 -.[0][534]=533 -.[0][535]=534 -.[0][536]=535 -.[0][537]=536 -.[0][538]=537 -.[0][539]=538 -.[0][540]=539 -.[0][541]=540 -.[0][542]=541 -.[0][543]=542 -.[0][544]=543 -.[0][545]=544 -.[0][546]=545 -.[0][547]=546 -.[0][548]=547 -.[0][549]=548 -.[0][550]=549 -.[0][551]=550 -.[0][552]=551 -.[0][553]=552 -.[0][554]=553 -.[0][555]=554 -.[0][556]=555 -.[0][557]=556 -.[0][558]=557 -.[0][559]=558 -.[0][560]=559 -.[0][561]=560 -.[0][562]=561 -.[0][563]=562 -.[0][564]=563 -.[0][565]=564 -.[0][566]=565 -.[0][567]=566 -.[0][568]=567 -.[0][569]=568 -.[0][570]=569 -.[0][571]=570 -.[0][572]=571 -.[0][573]=572 -.[0][574]=573 -.[0][575]=574 -.[0][576]=575 -.[0][577]=576 -.[0][578]=577 -.[0][579]=578 -.[0][580]=579 -.[0][581]=580 -.[0][582]=581 
-.[0][583]=582 -.[0][584]=583 -.[0][585]=584 -.[0][586]=585 -.[0][587]=586 -.[0][588]=587 -.[0][589]=588 -.[0][590]=589 -.[0][591]=590 -.[0][592]=591 -.[0][593]=592 -.[0][594]=593 -.[0][595]=594 -.[0][596]=595 -.[0][597]=596 -.[0][598]=597 -.[0][599]=598 -.[0][600]=599 -.[0][601]=600 -.[0][602]=601 -.[0][603]=602 -.[0][604]=603 -.[0][605]=604 -.[0][606]=605 -.[0][607]=606 -.[0][608]=607 -.[0][609]=608 -.[0][610]=609 -.[0][611]=610 -.[0][612]=611 -.[0][613]=612 -.[0][614]=613 -.[0][615]=614 -.[0][616]=615 -.[0][617]=616 -.[0][618]=617 -.[0][619]=618 -.[0][620]=619 -.[0][621]=620 -.[0][622]=621 -.[0][623]=622 -.[0][624]=623 -.[0][625]=624 -.[0][626]=625 -.[0][627]=626 -.[0][628]=627 -.[0][629]=628 -.[0][630]=629 -.[0][631]=630 -.[0][632]=631 -.[0][633]=632 -.[0][634]=633 -.[0][635]=634 -.[0][636]=635 -.[0][637]=636 -.[0][638]=637 -.[0][639]=638 -.[0][640]=639 -.[0][641]=640 -.[0][642]=641 -.[0][643]=642 -.[0][644]=643 -.[0][645]=644 -.[0][646]=645 -.[0][647]=646 -.[0][648]=647 -.[0][649]=648 -.[0][650]=649 -.[0][651]=650 -.[0][652]=651 -.[0][653]=652 -.[0][654]=653 -.[0][655]=654 -.[0][656]=655 -.[0][657]=656 -.[0][658]=657 -.[0][659]=658 -.[0][660]=659 -.[0][661]=660 -.[0][662]=661 -.[0][663]=662 -.[0][664]=663 -.[0][665]=664 -.[0][666]=665 -.[0][667]=666 -.[0][668]=667 -.[0][669]=668 -.[0][670]=669 -.[0][671]=670 -.[0][672]=671 -.[0][673]=672 -.[0][674]=673 -.[0][675]=674 -.[0][676]=675 -.[0][677]=676 -.[0][678]=677 -.[0][679]=678 -.[0][680]=679 -.[0][681]=680 -.[0][682]=681 -.[0][683]=682 -.[0][684]=683 -.[0][685]=684 -.[0][686]=685 -.[0][687]=686 -.[0][688]=687 -.[0][689]=688 -.[0][690]=689 -.[0][691]=690 -.[0][692]=691 -.[0][693]=692 -.[0][694]=693 -.[0][695]=694 -.[0][696]=695 -.[0][697]=696 -.[0][698]=697 -.[0][699]=698 -.[0][700]=699 -.[0][701]=700 -.[0][702]=701 -.[0][703]=702 -.[0][704]=703 -.[0][705]=704 -.[0][706]=705 -.[0][707]=706 -.[0][708]=707 -.[0][709]=708 -.[0][710]=709 -.[0][711]=710 -.[0][712]=711 -.[0][713]=712 -.[0][714]=713 -.[0][715]=714 -.[0][716]=715 -.[0][717]=716 -.[0][718]=717 -.[0][719]=718 -.[0][720]=719 -.[0][721]=720 -.[0][722]=721 -.[0][723]=722 -.[0][724]=723 -.[0][725]=724 -.[0][726]=725 -.[0][727]=726 -.[0][728]=727 -.[0][729]=728 -.[0][730]=729 -.[0][731]=730 -.[0][732]=731 -.[0][733]=732 -.[0][734]=733 -.[0][735]=734 -.[0][736]=735 -.[0][737]=736 -.[0][738]=737 -.[0][739]=738 -.[0][740]=739 -.[0][741]=740 -.[0][742]=741 -.[0][743]=742 -.[0][744]=743 -.[0][745]=744 -.[0][746]=745 -.[0][747]=746 -.[0][748]=747 -.[0][749]=748 -.[0][750]=749 -.[0][751]=750 -.[0][752]=751 -.[0][753]=752 -.[0][754]=753 -.[0][755]=754 -.[0][756]=755 -.[0][757]=756 -.[0][758]=757 -.[0][759]=758 -.[0][760]=759 -.[0][761]=760 -.[0][762]=761 -.[0][763]=762 -.[0][764]=763 -.[0][765]=764 -.[0][766]=765 -.[0][767]=766 -.[0][768]=767 -.[0][769]=768 -.[0][770]=769 -.[0][771]=770 -.[0][772]=771 -.[0][773]=772 -.[0][774]=773 -.[0][775]=774 -.[0][776]=775 -.[0][777]=776 -.[0][778]=777 -.[0][779]=778 -.[0][780]=779 -.[0][781]=780 -.[0][782]=781 -.[0][783]=782 -.[0][784]=783 -.[0][785]=784 -.[0][786]=785 -.[0][787]=786 -.[0][788]=787 -.[0][789]=788 -.[0][790]=789 -.[0][791]=790 -.[0][792]=791 -.[0][793]=792 -.[0][794]=793 -.[0][795]=794 -.[0][796]=795 -.[0][797]=796 -.[0][798]=797 -.[0][799]=798 -.[0][800]=799 -.[0][801]=800 -.[0][802]=801 -.[0][803]=802 -.[0][804]=803 -.[0][805]=804 -.[0][806]=805 -.[0][807]=806 -.[0][808]=807 -.[0][809]=808 -.[0][810]=809 -.[0][811]=810 -.[0][812]=811 -.[0][813]=812 -.[0][814]=813 -.[0][815]=814 -.[0][816]=815 -.[0][817]=816 -.[0][818]=817 -.[0][819]=818 
-.[0][820]=819 -.[0][821]=820 -.[0][822]=821 -.[0][823]=822 -.[0][824]=823 -.[0][825]=824 -.[0][826]=825 -.[0][827]=826 -.[0][828]=827 -.[0][829]=828 -.[0][830]=829 -.[0][831]=830 -.[0][832]=831 -.[0][833]=832 -.[0][834]=833 -.[0][835]=834 -.[0][836]=835 -.[0][837]=836 -.[0][838]=837 -.[0][839]=838 -.[0][840]=839 -.[0][841]=840 -.[0][842]=841 -.[0][843]=842 -.[0][844]=843 -.[0][845]=844 -.[0][846]=845 -.[0][847]=846 -.[0][848]=847 -.[0][849]=848 -.[0][850]=849 -.[0][851]=850 -.[0][852]=851 -.[0][853]=852 -.[0][854]=853 -.[0][855]=854 -.[0][856]=855 -.[0][857]=856 -.[0][858]=857 -.[0][859]=858 -.[0][860]=859 -.[0][861]=860 -.[0][862]=861 -.[0][863]=862 -.[0][864]=863 -.[0][865]=864 -.[0][866]=865 -.[0][867]=866 -.[0][868]=867 -.[0][869]=868 -.[0][870]=869 -.[0][871]=870 -.[0][872]=871 -.[0][873]=872 -.[0][874]=873 -.[0][875]=874 -.[0][876]=875 -.[0][877]=876 -.[0][878]=877 -.[0][879]=878 -.[0][880]=879 -.[0][881]=880 -.[0][882]=881 -.[0][883]=882 -.[0][884]=883 -.[0][885]=884 -.[0][886]=885 -.[0][887]=886 -.[0][888]=887 -.[0][889]=888 -.[0][890]=889 -.[0][891]=890 -.[0][892]=891 -.[0][893]=892 -.[0][894]=893 -.[0][895]=894 -.[0][896]=895 -.[0][897]=896 -.[0][898]=897 -.[0][899]=898 -.[0][900]=899 -.[0][901]=900 -.[0][902]=901 -.[0][903]=902 -.[0][904]=903 -.[0][905]=904 -.[0][906]=905 -.[0][907]=906 -.[0][908]=907 -.[0][909]=908 -.[0][910]=909 -.[0][911]=910 -.[0][912]=911 -.[0][913]=912 -.[0][914]=913 -.[0][915]=914 -.[0][916]=915 -.[0][917]=916 -.[0][918]=917 -.[0][919]=918 -.[0][920]=919 -.[0][921]=920 -.[0][922]=921 -.[0][923]=922 -.[0][924]=923 -.[0][925]=924 -.[0][926]=925 -.[0][927]=926 -.[0][928]=927 -.[0][929]=928 -.[0][930]=929 -.[0][931]=930 -.[0][932]=931 -.[0][933]=932 -.[0][934]=933 -.[0][935]=934 -.[0][936]=935 -.[0][937]=936 -.[0][938]=937 -.[0][939]=938 -.[0][940]=939 -.[0][941]=940 -.[0][942]=941 -.[0][943]=942 -.[0][944]=943 -.[0][945]=944 -.[0][946]=945 -.[0][947]=946 -.[0][948]=947 -.[0][949]=948 -.[0][950]=949 -.[0][951]=950 -.[0][952]=951 -.[0][953]=952 -.[0][954]=953 -.[0][955]=954 -.[0][956]=955 -.[0][957]=956 -.[0][958]=957 -.[0][959]=958 -.[0][960]=959 -.[0][961]=960 -.[0][962]=961 -.[0][963]=962 -.[0][964]=963 -.[0][965]=964 -.[0][966]=965 -.[0][967]=966 -.[0][968]=967 -.[0][969]=968 -.[0][970]=969 -.[0][971]=970 -.[0][972]=971 -.[0][973]=972 -.[0][974]=973 -.[0][975]=974 -.[0][976]=975 -.[0][977]=976 -.[0][978]=977 -.[0][979]=978 -.[0][980]=979 -.[0][981]=980 -.[0][982]=981 -.[0][983]=982 -.[0][984]=983 -.[0][985]=984 -.[0][986]=985 -.[0][987]=986 -.[0][988]=987 -.[0][989]=988 -.[0][990]=989 -.[0][991]=990 -.[0][992]=991 -.[0][993]=992 -.[0][994]=993 -.[0][995]=994 -.[0][996]=995 -.[0][997]=996 -.[0][998]=997 -.[0][999]=998 -.[0][1000]=999 -.[0][1001]=1000 -.[0][1002]=1001 -.[0][1003]=1002 -.[0][1004]=1003 -.[0][1005]=1004 -.[0][1006]=1005 -.[0][1007]=1006 -.[0][1008]=1007 -.[0][1009]=1008 -.[0][1010]=1009 -.[0][1011]=1010 -.[0][1012]=1011 -.[0][1013]=1012 -.[0][1014]=1013 -.[0][1015]=1014 -.[0][1016]=1015 -.[0][1017]=1016 -.[0][1018]=1017 -.[0][1019]=1018 -.[0][1020]=1019 -.[0][1021]=1020 -.[0][1022]=1021 -.[0][1023]=1022 -.[0][1024]=1023 -.[0][1025]=1024 -.[0][1026]=1025 -.[0][1027]=1026 -.[0][1028]=1027 -.[0][1029]=1028 -.[0][1030]=1029 -.[0][1031]=1030 -.[0][1032]=1031 -.[0][1033]=1032 -.[0][1034]=1033 -.[0][1035]=1034 -.[0][1036]=1035 -.[0][1037]=1036 -.[0][1038]=1037 -.[0][1039]=1038 -.[0][1040]=1039 -.[0][1041]=1040 -.[0][1042]=1041 -.[0][1043]=1042 -.[0][1044]=1043 -.[0][1045]=1044 -.[0][1046]=1045 -.[0][1047]=1046 -.[0][1048]=1047 -.[0][1049]=1048 
-.[0][1050]=1049 -.[0][1051]=1050 -.[0][1052]=1051 -.[0][1053]=1052 -.[0][1054]=1053 -.[0][1055]=1054 -.[0][1056]=1055 -.[0][1057]=1056 -.[0][1058]=1057 -.[0][1059]=1058 -.[0][1060]=1059 -.[0][1061]=1060 -.[0][1062]=1061 -.[0][1063]=1062 -.[0][1064]=1063 -.[0][1065]=1064 -.[0][1066]=1065 -.[0][1067]=1066 -.[0][1068]=1067 -.[0][1069]=1068 -.[0][1070]=1069 -.[0][1071]=1070 -.[0][1072]=1071 -.[0][1073]=1072 -.[0][1074]=1073 -.[0][1075]=1074 -.[0][1076]=1075 -.[0][1077]=1076 -.[0][1078]=1077 -.[0][1079]=1078 -.[0][1080]=1079 -.[0][1081]=1080 -.[0][1082]=1081 -.[0][1083]=1082 -.[0][1084]=1083 -.[0][1085]=1084 -.[0][1086]=1085 -.[0][1087]=1086 -.[0][1088]=1087 -.[0][1089]=1088 -.[0][1090]=1089 -.[0][1091]=1090 -.[0][1092]=1091 -.[0][1093]=1092 -.[0][1094]=1093 -.[0][1095]=1094 -.[0][1096]=1095 -.[0][1097]=1096 -.[0][1098]=1097 -.[0][1099]=1098 -.[0][1100]=1099 -.[0][1101]=1100 -.[0][1102]=1101 -.[0][1103]=1102 -.[0][1104]=1103 -.[0][1105]=1104 -.[0][1106]=1105 -.[0][1107]=1106 -.[0][1108]=1107 -.[0][1109]=1108 -.[0][1110]=1109 -.[0][1111]=1110 -.[0][1112]=1111 -.[0][1113]=1112 -.[0][1114]=1113 -.[0][1115]=1114 -.[0][1116]=1115 -.[0][1117]=1116 -.[0][1118]=1117 -.[0][1119]=1118 -.[0][1120]=1119 -.[0][1121]=1120 -.[0][1122]=1121 -.[0][1123]=1122 -.[0][1124]=1123 -.[0][1125]=1124 -.[0][1126]=1125 -.[0][1127]=1126 -.[0][1128]=1127 -.[0][1129]=1128 -.[0][1130]=1129 -.[0][1131]=1130 -.[0][1132]=1131 -.[0][1133]=1132 -.[0][1134]=1133 -.[0][1135]=1134 -.[0][1136]=1135 -.[0][1137]=1136 -.[0][1138]=1137 -.[0][1139]=1138 -.[0][1140]=1139 -.[0][1141]=1140 -.[0][1142]=1141 -.[0][1143]=1142 -.[0][1144]=1143 -.[0][1145]=1144 -.[0][1146]=1145 -.[0][1147]=1146 -.[0][1148]=1147 -.[0][1149]=1148 -.[0][1150]=1149 -.[0][1151]=1150 -.[0][1152]=1151 -.[0][1153]=1152 -.[0][1154]=1153 -.[0][1155]=1154 -.[0][1156]=1155 -.[0][1157]=1156 -.[0][1158]=1157 -.[0][1159]=1158 -.[0][1160]=1159 -.[0][1161]=1160 -.[0][1162]=1161 -.[0][1163]=1162 -.[0][1164]=1163 -.[0][1165]=1164 -.[0][1166]=1165 -.[0][1167]=1166 -.[0][1168]=1167 -.[0][1169]=1168 -.[0][1170]=1169 -.[0][1171]=1170 -.[0][1172]=1171 -.[0][1173]=1172 -.[0][1174]=1173 -.[0][1175]=1174 -.[0][1176]=1175 -.[0][1177]=1176 -.[0][1178]=1177 -.[0][1179]=1178 -.[0][1180]=1179 -.[0][1181]=1180 -.[0][1182]=1181 -.[0][1183]=1182 -.[0][1184]=1183 -.[0][1185]=1184 -.[0][1186]=1185 -.[0][1187]=1186 -.[0][1188]=1187 -.[0][1189]=1188 -.[0][1190]=1189 -.[0][1191]=1190 -.[0][1192]=1191 -.[0][1193]=1192 -.[0][1194]=1193 -.[0][1195]=1194 -.[0][1196]=1195 -.[0][1197]=1196 -.[0][1198]=1197 -.[0][1199]=1198 -.[0][1200]=1199 -.[0][1201]=1200 -.[0][1202]=1201 -.[0][1203]=1202 -.[0][1204]=1203 -.[0][1205]=1204 -.[0][1206]=1205 -.[0][1207]=1206 -.[0][1208]=1207 -.[0][1209]=1208 -.[0][1210]=1209 -.[0][1211]=1210 -.[0][1212]=1211 -.[0][1213]=1212 -.[0][1214]=1213 -.[0][1215]=1214 -.[0][1216]=1215 -.[0][1217]=1216 -.[0][1218]=1217 -.[0][1219]=1218 -.[0][1220]=1219 -.[0][1221]=1220 -.[0][1222]=1221 -.[0][1223]=1222 -.[0][1224]=1223 -.[0][1225]=1224 -.[0][1226]=1225 -.[0][1227]=1226 -.[0][1228]=1227 -.[0][1229]=1228 -.[0][1230]=1229 -.[0][1231]=1230 -.[0][1232]=1231 -.[0][1233]=1232 -.[0][1234]=1233 -.[0][1235]=1234 -.[0][1236]=1235 -.[0][1237]=1236 -.[0][1238]=1237 -.[0][1239]=1238 -.[0][1240]=1239 -.[0][1241]=1240 -.[0][1242]=1241 -.[0][1243]=1242 -.[0][1244]=1243 -.[0][1245]=1244 -.[0][1246]=1245 -.[0][1247]=1246 -.[0][1248]=1247 -.[0][1249]=1248 -.[0][1250]=1249 -.[0][1251]=1250 -.[0][1252]=1251 -.[0][1253]=1252 -.[0][1254]=1253 -.[0][1255]=1254 -.[0][1256]=1255 -.[0][1257]=1256 -.[0][1258]=1257 
-.[0][1259]=1258 -.[0][1260]=1259 -.[0][1261]=1260 -.[0][1262]=1261 -.[0][1263]=1262 -.[0][1264]=1263 -.[0][1265]=1264 -.[0][1266]=1265 -.[0][1267]=1266 -.[0][1268]=1267 -.[0][1269]=1268 -.[0][1270]=1269 -.[0][1271]=1270 -.[0][1272]=1271 -.[0][1273]=1272 -.[0][1274]=1273 -.[0][1275]=1274 -.[0][1276]=1275 -.[0][1277]=1276 -.[0][1278]=1277 -.[0][1279]=1278 -.[0][1280]=1279 -.[0][1281]=1280 -.[0][1282]=1281 -.[0][1283]=1282 -.[0][1284]=1283 -.[0][1285]=1284 -.[0][1286]=1285 -.[0][1287]=1286 -.[0][1288]=1287 -.[0][1289]=1288 -.[0][1290]=1289 -.[0][1291]=1290 -.[0][1292]=1291 -.[0][1293]=1292 -.[0][1294]=1293 -.[0][1295]=1294 -.[0][1296]=1295 -.[0][1297]=1296 -.[0][1298]=1297 -.[0][1299]=1298 -.[0][1300]=1299 -.[0][1301]=1300 -.[0][1302]=1301 -.[0][1303]=1302 -.[0][1304]=1303 -.[0][1305]=1304 -.[0][1306]=1305 -.[0][1307]=1306 -.[0][1308]=1307 -.[0][1309]=1308 -.[0][1310]=1309 -.[0][1311]=1310 -.[0][1312]=1311 -.[0][1313]=1312 -.[0][1314]=1313 -.[0][1315]=1314 -.[0][1316]=1315 -.[0][1317]=1316 -.[0][1318]=1317 -.[0][1319]=1318 -.[0][1320]=1319 -.[0][1321]=1320 -.[0][1322]=1321 -.[0][1323]=1322 -.[0][1324]=1323 -.[0][1325]=1324 -.[0][1326]=1325 -.[0][1327]=1326 -.[0][1328]=1327 -.[0][1329]=1328 -.[0][1330]=1329 -.[0][1331]=1330 -.[0][1332]=1331 -.[0][1333]=1332 -.[0][1334]=1333 -.[0][1335]=1334 -.[0][1336]=1335 -.[0][1337]=1336 -.[0][1338]=1337 -.[0][1339]=1338 -.[0][1340]=1339 -.[0][1341]=1340 -.[0][1342]=1341 -.[0][1343]=1342 -.[0][1344]=1343 -.[0][1345]=1344 -.[0][1346]=1345 -.[0][1347]=1346 -.[0][1348]=1347 -.[0][1349]=1348 -.[0][1350]=1349 -.[0][1351]=1350 -.[0][1352]=1351 -.[0][1353]=1352 -.[0][1354]=1353 -.[0][1355]=1354 -.[0][1356]=1355 -.[0][1357]=1356 -.[0][1358]=1357 -.[0][1359]=1358 -.[0][1360]=1359 -.[0][1361]=1360 -.[0][1362]=1361 -.[0][1363]=1362 -.[0][1364]=1363 -.[0][1365]=1364 -.[0][1366]=1365 -.[0][1367]=1366 -.[0][1368]=1367 -.[0][1369]=1368 -.[0][1370]=1369 -.[0][1371]=1370 -.[0][1372]=1371 -.[0][1373]=1372 -.[0][1374]=1373 -.[0][1375]=1374 -.[0][1376]=1375 -.[0][1377]=1376 -.[0][1378]=1377 -.[0][1379]=1378 -.[0][1380]=1379 -.[0][1381]=1380 -.[0][1382]=1381 -.[0][1383]=1382 -.[0][1384]=1383 -.[0][1385]=1384 -.[0][1386]=1385 -.[0][1387]=1386 -.[0][1388]=1387 -.[0][1389]=1388 -.[0][1390]=1389 -.[0][1391]=1390 -.[0][1392]=1391 -.[0][1393]=1392 -.[0][1394]=1393 -.[0][1395]=1394 -.[0][1396]=1395 -.[0][1397]=1396 -.[0][1398]=1397 -.[0][1399]=1398 -.[0][1400]=1399 -.[0][1401]=1400 -.[0][1402]=1401 -.[0][1403]=1402 -.[0][1404]=1403 -.[0][1405]=1404 -.[0][1406]=1405 -.[0][1407]=1406 -.[0][1408]=1407 -.[0][1409]=1408 -.[0][1410]=1409 -.[0][1411]=1410 -.[0][1412]=1411 -.[0][1413]=1412 -.[0][1414]=1413 -.[0][1415]=1414 -.[0][1416]=1415 -.[0][1417]=1416 -.[0][1418]=1417 -.[0][1419]=1418 -.[0][1420]=1419 -.[0][1421]=1420 -.[0][1422]=1421 -.[0][1423]=1422 -.[0][1424]=1423 -.[0][1425]=1424 -.[0][1426]=1425 -.[0][1427]=1426 -.[0][1428]=1427 -.[0][1429]=1428 -.[0][1430]=1429 -.[0][1431]=1430 -.[0][1432]=1431 -.[0][1433]=1432 -.[0][1434]=1433 -.[0][1435]=1434 -.[0][1436]=1435 -.[0][1437]=1436 -.[0][1438]=1437 -.[0][1439]=1438 -.[0][1440]=1439 -.[0][1441]=1440 -.[0][1442]=1441 -.[0][1443]=1442 -.[0][1444]=1443 -.[0][1445]=1444 -.[0][1446]=1445 -.[0][1447]=1446 -.[0][1448]=1447 -.[0][1449]=1448 -.[0][1450]=1449 -.[0][1451]=1450 -.[0][1452]=1451 -.[0][1453]=1452 -.[0][1454]=1453 -.[0][1455]=1454 -.[0][1456]=1455 -.[0][1457]=1456 -.[0][1458]=1457 -.[0][1459]=1458 -.[0][1460]=1459 -.[0][1461]=1460 -.[0][1462]=1461 -.[0][1463]=1462 -.[0][1464]=1463 -.[0][1465]=1464 -.[0][1466]=1465 -.[0][1467]=1466 
-.[0][1468]=1467 -.[0][1469]=1468 -.[0][1470]=1469 -.[0][1471]=1470 -.[0][1472]=1471 -.[0][1473]=1472 -.[0][1474]=1473 -.[0][1475]=1474 -.[0][1476]=1475 -.[0][1477]=1476 -.[0][1478]=1477 -.[0][1479]=1478 -.[0][1480]=1479 -.[0][1481]=1480 -.[0][1482]=1481 -.[0][1483]=1482 -.[0][1484]=1483 -.[0][1485]=1484 -.[0][1486]=1485 -.[0][1487]=1486 -.[0][1488]=1487 -.[0][1489]=1488 -.[0][1490]=1489 -.[0][1491]=1490 -.[0][1492]=1491 -.[0][1493]=1492 -.[0][1494]=1493 -.[0][1495]=1494 -.[0][1496]=1495 -.[0][1497]=1496 -.[0][1498]=1497 -.[0][1499]=1498 -.[0][1500]=1499 -.[0][1501]=1500 -.[0][1502]=1501 -.[0][1503]=1502 -.[0][1504]=1503 -.[0][1505]=1504 -.[0][1506]=1505 -.[0][1507]=1506 -.[0][1508]=1507 -.[0][1509]=1508 -.[0][1510]=1509 -.[0][1511]=1510 -.[0][1512]=1511 -.[0][1513]=1512 -.[0][1514]=1513 -.[0][1515]=1514 -.[0][1516]=1515 -.[0][1517]=1516 -.[0][1518]=1517 -.[0][1519]=1518 -.[0][1520]=1519 -.[0][1521]=1520 -.[0][1522]=1521 -.[0][1523]=1522 -.[0][1524]=1523 -.[0][1525]=1524 -.[0][1526]=1525 -.[0][1527]=1526 -.[0][1528]=1527 -.[0][1529]=1528 -.[0][1530]=1529 -.[0][1531]=1530 -.[0][1532]=1531 -.[0][1533]=1532 -.[0][1534]=1533 -.[0][1535]=1534 -.[0][1536]=1535 -.[0][1537]=1536 -.[0][1538]=1537 -.[0][1539]=1538 -.[0][1540]=1539 -.[0][1541]=1540 -.[0][1542]=1541 -.[0][1543]=1542 -.[0][1544]=1543 -.[0][1545]=1544 -.[0][1546]=1545 -.[0][1547]=1546 -.[0][1548]=1547 -.[0][1549]=1548 -.[0][1550]=1549 -.[0][1551]=1550 -.[0][1552]=1551 -.[0][1553]=1552 -.[0][1554]=1553 -.[0][1555]=1554 -.[0][1556]=1555 -.[0][1557]=1556 -.[0][1558]=1557 -.[0][1559]=1558 -.[0][1560]=1559 -.[0][1561]=1560 -.[0][1562]=1561 -.[0][1563]=1562 -.[0][1564]=1563 -.[0][1565]=1564 -.[0][1566]=1565 -.[0][1567]=1566 -.[0][1568]=1567 -.[0][1569]=1568 -.[0][1570]=1569 -.[0][1571]=1570 -.[0][1572]=1571 -.[0][1573]=1572 -.[0][1574]=1573 -.[0][1575]=1574 -.[0][1576]=1575 -.[0][1577]=1576 -.[0][1578]=1577 -.[0][1579]=1578 -.[0][1580]=1579 -.[0][1581]=1580 -.[0][1582]=1581 -.[0][1583]=1582 -.[0][1584]=1583 -.[0][1585]=1584 -.[0][1586]=1585 -.[0][1587]=1586 -.[0][1588]=1587 -.[0][1589]=1588 -.[0][1590]=1589 -.[0][1591]=1590 -.[0][1592]=1591 -.[0][1593]=1592 -.[0][1594]=1593 -.[0][1595]=1594 -.[0][1596]=1595 -.[0][1597]=1596 -.[0][1598]=1597 -.[0][1599]=1598 -.[0][1600]=1599 -.[0][1601]=1600 -.[0][1602]=1601 -.[0][1603]=1602 -.[0][1604]=1603 -.[0][1605]=1604 -.[0][1606]=1605 -.[0][1607]=1606 -.[0][1608]=1607 -.[0][1609]=1608 -.[0][1610]=1609 -.[0][1611]=1610 -.[0][1612]=1611 -.[0][1613]=1612 -.[0][1614]=1613 -.[0][1615]=1614 -.[0][1616]=1615 -.[0][1617]=1616 -.[0][1618]=1617 -.[0][1619]=1618 -.[0][1620]=1619 -.[0][1621]=1620 -.[0][1622]=1621 -.[0][1623]=1622 -.[0][1624]=1623 -.[0][1625]=1624 -.[0][1626]=1625 -.[0][1627]=1626 -.[0][1628]=1627 -.[0][1629]=1628 -.[0][1630]=1629 -.[0][1631]=1630 -.[0][1632]=1631 -.[0][1633]=1632 -.[0][1634]=1633 -.[0][1635]=1634 -.[0][1636]=1635 -.[0][1637]=1636 -.[0][1638]=1637 -.[0][1639]=1638 -.[0][1640]=1639 -.[0][1641]=1640 -.[0][1642]=1641 -.[0][1643]=1642 -.[0][1644]=1643 -.[0][1645]=1644 -.[0][1646]=1645 -.[0][1647]=1646 -.[0][1648]=1647 -.[0][1649]=1648 -.[0][1650]=1649 -.[0][1651]=1650 -.[0][1652]=1651 -.[0][1653]=1652 -.[0][1654]=1653 -.[0][1655]=1654 -.[0][1656]=1655 -.[0][1657]=1656 -.[0][1658]=1657 -.[0][1659]=1658 -.[0][1660]=1659 -.[0][1661]=1660 -.[0][1662]=1661 -.[0][1663]=1662 -.[0][1664]=1663 -.[0][1665]=1664 -.[0][1666]=1665 -.[0][1667]=1666 -.[0][1668]=1667 -.[0][1669]=1668 -.[0][1670]=1669 -.[0][1671]=1670 -.[0][1672]=1671 -.[0][1673]=1672 -.[0][1674]=1673 -.[0][1675]=1674 -.[0][1676]=1675 
-.[0][1677]=1676 -.[0][1678]=1677 -.[0][1679]=1678 -.[0][1680]=1679 -.[0][1681]=1680 -.[0][1682]=1681 -.[0][1683]=1682 -.[0][1684]=1683 -.[0][1685]=1684 -.[0][1686]=1685 -.[0][1687]=1686 -.[0][1688]=1687 -.[0][1689]=1688 -.[0][1690]=1689 -.[0][1691]=1690 -.[0][1692]=1691 -.[0][1693]=1692 -.[0][1694]=1693 -.[0][1695]=1694 -.[0][1696]=1695 -.[0][1697]=1696 -.[0][1698]=1697 -.[0][1699]=1698 -.[0][1700]=1699 -.[0][1701]=1700 -.[0][1702]=1701 -.[0][1703]=1702 -.[0][1704]=1703 -.[0][1705]=1704 -.[0][1706]=1705 -.[0][1707]=1706 -.[0][1708]=1707 -.[0][1709]=1708 -.[0][1710]=1709 -.[0][1711]=1710 -.[0][1712]=1711 -.[0][1713]=1712 -.[0][1714]=1713 -.[0][1715]=1714 -.[0][1716]=1715 -.[0][1717]=1716 -.[0][1718]=1717 -.[0][1719]=1718 -.[0][1720]=1719 -.[0][1721]=1720 -.[0][1722]=1721 -.[0][1723]=1722 -.[0][1724]=1723 -.[0][1725]=1724 -.[0][1726]=1725 -.[0][1727]=1726 -.[0][1728]=1727 -.[0][1729]=1728 -.[0][1730]=1729 -.[0][1731]=1730 -.[0][1732]=1731 -.[0][1733]=1732 -.[0][1734]=1733 -.[0][1735]=1734 -.[0][1736]=1735 -.[0][1737]=1736 -.[0][1738]=1737 -.[0][1739]=1738 -.[0][1740]=1739 -.[0][1741]=1740 -.[0][1742]=1741 -.[0][1743]=1742 -.[0][1744]=1743 -.[0][1745]=1744 -.[0][1746]=1745 -.[0][1747]=1746 -.[0][1748]=1747 -.[0][1749]=1748 -.[0][1750]=1749 -.[0][1751]=1750 -.[0][1752]=1751 -.[0][1753]=1752 -.[0][1754]=1753 -.[0][1755]=1754 -.[0][1756]=1755 -.[0][1757]=1756 -.[0][1758]=1757 -.[0][1759]=1758 -.[0][1760]=1759 -.[0][1761]=1760 -.[0][1762]=1761 -.[0][1763]=1762 -.[0][1764]=1763 -.[0][1765]=1764 -.[0][1766]=1765 -.[0][1767]=1766 -.[0][1768]=1767 -.[0][1769]=1768 -.[0][1770]=1769 -.[0][1771]=1770 -.[0][1772]=1771 -.[0][1773]=1772 -.[0][1774]=1773 -.[0][1775]=1774 -.[0][1776]=1775 -.[0][1777]=1776 -.[0][1778]=1777 -.[0][1779]=1778 -.[0][1780]=1779 -.[0][1781]=1780 -.[0][1782]=1781 -.[0][1783]=1782 -.[0][1784]=1783 -.[0][1785]=1784 -.[0][1786]=1785 -.[0][1787]=1786 -.[0][1788]=1787 -.[0][1789]=1788 -.[0][1790]=1789 -.[0][1791]=1790 -.[0][1792]=1791 -.[0][1793]=1792 -.[0][1794]=1793 -.[0][1795]=1794 -.[0][1796]=1795 -.[0][1797]=1796 -.[0][1798]=1797 -.[0][1799]=1798 -.[0][1800]=1799 -.[0][1801]=1800 -.[0][1802]=1801 -.[0][1803]=1802 -.[0][1804]=1803 -.[0][1805]=1804 -.[0][1806]=1805 -.[0][1807]=1806 -.[0][1808]=1807 -.[0][1809]=1808 -.[0][1810]=1809 -.[0][1811]=1810 -.[0][1812]=1811 -.[0][1813]=1812 -.[0][1814]=1813 -.[0][1815]=1814 -.[0][1816]=1815 -.[0][1817]=1816 -.[0][1818]=1817 -.[0][1819]=1818 -.[0][1820]=1819 -.[0][1821]=1820 -.[0][1822]=1821 -.[0][1823]=1822 -.[0][1824]=1823 -.[0][1825]=1824 -.[0][1826]=1825 -.[0][1827]=1826 -.[0][1828]=1827 -.[0][1829]=1828 -.[0][1830]=1829 -.[0][1831]=1830 -.[0][1832]=1831 -.[0][1833]=1832 -.[0][1834]=1833 -.[0][1835]=1834 -.[0][1836]=1835 -.[0][1837]=1836 -.[0][1838]=1837 -.[0][1839]=1838 -.[0][1840]=1839 -.[0][1841]=1840 -.[0][1842]=1841 -.[0][1843]=1842 -.[0][1844]=1843 -.[0][1845]=1844 -.[0][1846]=1845 -.[0][1847]=1846 -.[0][1848]=1847 -.[0][1849]=1848 -.[0][1850]=1849 -.[0][1851]=1850 -.[0][1852]=1851 -.[0][1853]=1852 -.[0][1854]=1853 -.[0][1855]=1854 -.[0][1856]=1855 -.[0][1857]=1856 -.[0][1858]=1857 -.[0][1859]=1858 -.[0][1860]=1859 -.[0][1861]=1860 -.[0][1862]=1861 -.[0][1863]=1862 -.[0][1864]=1863 -.[0][1865]=1864 -.[0][1866]=1865 -.[0][1867]=1866 -.[0][1868]=1867 -.[0][1869]=1868 -.[0][1870]=1869 -.[0][1871]=1870 -.[0][1872]=1871 -.[0][1873]=1872 -.[0][1874]=1873 -.[0][1875]=1874 -.[0][1876]=1875 -.[0][1877]=1876 -.[0][1878]=1877 -.[0][1879]=1878 -.[0][1880]=1879 -.[0][1881]=1880 -.[0][1882]=1881 -.[0][1883]=1882 -.[0][1884]=1883 -.[0][1885]=1884 
-.[0][1886]=1885 -.[0][1887]=1886 -.[0][1888]=1887 -.[0][1889]=1888 -.[0][1890]=1889 -.[0][1891]=1890 -.[0][1892]=1891 -.[0][1893]=1892 -.[0][1894]=1893 -.[0][1895]=1894 -.[0][1896]=1895 -.[0][1897]=1896 -.[0][1898]=1897 -.[0][1899]=1898 -.[0][1900]=1899 -.[0][1901]=1900 -.[0][1902]=1901 -.[0][1903]=1902 -.[0][1904]=1903 -.[0][1905]=1904 -.[0][1906]=1905 -.[0][1907]=1906 -.[0][1908]=1907 -.[0][1909]=1908 -.[0][1910]=1909 -.[0][1911]=1910 -.[0][1912]=1911 -.[0][1913]=1912 -.[0][1914]=1913 -.[0][1915]=1914 -.[0][1916]=1915 -.[0][1917]=1916 -.[0][1918]=1917 -.[0][1919]=1918 -.[0][1920]=1919 -.[0][1921]=1920 -.[0][1922]=1921 -.[0][1923]=1922 -.[0][1924]=1923 -.[0][1925]=1924 -.[0][1926]=1925 -.[0][1927]=1926 -.[0][1928]=1927 -.[0][1929]=1928 -.[0][1930]=1929 -.[0][1931]=1930 -.[0][1932]=1931 -.[0][1933]=1932 -.[0][1934]=1933 -.[0][1935]=1934 -.[0][1936]=1935 -.[0][1937]=1936 -.[0][1938]=1937 -.[0][1939]=1938 -.[0][1940]=1939 -.[0][1941]=1940 -.[0][1942]=1941 -.[0][1943]=1942 -.[0][1944]=1943 -.[0][1945]=1944 -.[0][1946]=1945 -.[0][1947]=1946 -.[0][1948]=1947 -.[0][1949]=1948 -.[0][1950]=1949 -.[0][1951]=1950 -.[0][1952]=1951 -.[0][1953]=1952 -.[0][1954]=1953 -.[0][1955]=1954 -.[0][1956]=1955 -.[0][1957]=1956 -.[0][1958]=1957 -.[0][1959]=1958 -.[0][1960]=1959 -.[0][1961]=1960 -.[0][1962]=1961 -.[0][1963]=1962 -.[0][1964]=1963 -.[0][1965]=1964 -.[0][1966]=1965 -.[0][1967]=1966 -.[0][1968]=1967 -.[0][1969]=1968 -.[0][1970]=1969 -.[0][1971]=1970 -.[0][1972]=1971 -.[0][1973]=1972 -.[0][1974]=1973 -.[0][1975]=1974 -.[0][1976]=1975 -.[0][1977]=1976 -.[0][1978]=1977 -.[0][1979]=1978 -.[0][1980]=1979 -.[0][1981]=1980 -.[0][1982]=1981 -.[0][1983]=1982 -.[0][1984]=1983 -.[0][1985]=1984 -.[0][1986]=1985 -.[0][1987]=1986 -.[0][1988]=1987 -.[0][1989]=1988 -.[0][1990]=1989 -.[0][1991]=1990 -.[0][1992]=1991 -.[0][1993]=1992 -.[0][1994]=1993 -.[0][1995]=1994 -.[0][1996]=1995 -.[0][1997]=1996 -.[0][1998]=1997 -.[0][1999]=1998 -.[0][2000]=1999 -.[0][2001]=2000 -.[0][2002]=2001 -.[0][2003]=2002 -.[0][2004]=2003 -.[0][2005]=2004 -.[0][2006]=2005 -.[0][2007]=2006 -.[0][2008]=2007 -.[0][2009]=2008 -.[0][2010]=2009 -.[0][2011]=2010 -.[0][2012]=2011 -.[0][2013]=2012 -.[0][2014]=2013 -.[0][2015]=2014 -.[0][2016]=2015 -.[0][2017]=2016 -.[0][2018]=2017 -.[0][2019]=2018 -.[0][2020]=2019 -.[0][2021]=2020 -.[0][2022]=2021 -.[0][2023]=2022 -.[0][2024]=2023 -.[0][2025]=2024 -.[0][2026]=2025 -.[0][2027]=2026 -.[0][2028]=2027 -.[0][2029]=2028 -.[0][2030]=2029 -.[0][2031]=2030 -.[0][2032]=2031 -.[0][2033]=2032 -.[0][2034]=2033 -.[0][2035]=2034 -.[0][2036]=2035 -.[0][2037]=2036 -.[0][2038]=2037 -.[0][2039]=2038 -.[0][2040]=2039 -.[0][2041]=2040 -.[0][2042]=2041 -.[0][2043]=2042 -.[0][2044]=2043 -.[0][2045]=2044 -.[0][2046]=2045 -.[0][2047]=2046 -.[0][2048]=2047 -.[0][2049]=2048 -.[0][2050]=2049 -.[0][2051]=2050 -.[0][2052]=2051 -.[0][2053]=2052 -.[0][2054]=2053 -.[0][2055]=2054 -.[0][2056]=2055 -.[0][2057]=2056 -.[0][2058]=2057 -.[0][2059]=2058 -.[0][2060]=2059 -.[0][2061]=2060 -.[0][2062]=2061 -.[0][2063]=2062 -.[0][2064]=2063 -.[0][2065]=2064 -.[0][2066]=2065 -.[0][2067]=2066 -.[0][2068]=2067 -.[0][2069]=2068 -.[0][2070]=2069 -.[0][2071]=2070 -.[0][2072]=2071 -.[0][2073]=2072 -.[0][2074]=2073 -.[0][2075]=2074 -.[0][2076]=2075 -.[0][2077]=2076 -.[0][2078]=2077 -.[0][2079]=2078 -.[0][2080]=2079 -.[0][2081]=2080 -.[0][2082]=2081 -.[0][2083]=2082 -.[0][2084]=2083 -.[0][2085]=2084 -.[0][2086]=2085 -.[0][2087]=2086 -.[0][2088]=2087 -.[0][2089]=2088 -.[0][2090]=2089 -.[0][2091]=2090 -.[0][2092]=2091 -.[0][2093]=2092 -.[0][2094]=2093 
-.[0][2095]=2094 -.[0][2096]=2095 -.[0][2097]=2096 -.[0][2098]=2097 -.[0][2099]=2098 -.[0][2100]=2099 -.[0][2101]=2100 -.[0][2102]=2101 -.[0][2103]=2102 -.[0][2104]=2103 -.[0][2105]=2104 -.[0][2106]=2105 -.[0][2107]=2106 -.[0][2108]=2107 -.[0][2109]=2108 -.[0][2110]=2109 -.[0][2111]=2110 -.[0][2112]=2111 -.[0][2113]=2112 -.[0][2114]=2113 -.[0][2115]=2114 -.[0][2116]=2115 -.[0][2117]=2116 -.[0][2118]=2117 -.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json deleted file mode 100644 index e4ab4cd..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json +++ /dev/null @@ -1,2 +0,0 @@ -[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,74
4,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,150
6,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] -] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected deleted file mode 100644 index 67444e5..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected +++ /dev/null @@ -1 +0,0 @@ -.={} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json deleted file mode 100644 index 0967ef4..0000000 --- 
a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected deleted file mode 100644 index 79391c2..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json deleted file mode 100644 index d0f2fac..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json +++ /dev/null @@ -1 +0,0 @@ -{ "count" : 1234 } diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected deleted file mode 100644 index 5e96113..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected +++ /dev/null @@ -1,4 +0,0 @@ -.={} -.attribute="random" -.count=1234 -.name="test" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json deleted file mode 100644 index 4fcd4d8..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "count" : 1234, - "name" : "test", - "attribute" : "random" -} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected deleted file mode 100644 index 812965b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.={} -.=1234 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json deleted file mode 100644 index 450762d..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "" : 1234 -} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected deleted file mode 100644 index 8d88041..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected +++ /dev/null @@ -1,3 +0,0 @@ -.={} -.first=1 -.second=2 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json deleted file mode 100644 index fabd55d..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json +++ /dev/null @@ -1,14 +0,0 @@ -/* A comment - at the beginning of the file. - */ -{ - "first" : 1, // comment after 'first' on the same line - -/* Comment before 'second' - */ - "second" : 2 -} - -/* A comment at - the end of the file. 
- */ diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected deleted file mode 100644 index ae23572..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=8589934592 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json deleted file mode 100644 index 358452d..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json +++ /dev/null @@ -1,3 +0,0 @@ -// 2^33 => out of integer range, switch to double -8589934592 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected deleted file mode 100644 index df8de42..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=-4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json deleted file mode 100644 index 936c706..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json +++ /dev/null @@ -1,3 +0,0 @@ -// -2^32 => out of signed integer range, switch to double --4294967295 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected deleted file mode 100644 index d726abe..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected +++ /dev/null @@ -1,2 +0,0 @@ -.=1.2345678 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json deleted file mode 100644 index a8eb6d0..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1.2345678 -12345678e-7 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected deleted file mode 100644 index 949fd8f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=1234567.8 - - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json deleted file mode 100644 index f7923ba..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json +++ /dev/null @@ -1,3 +0,0 @@ -// 1234567.8 -0.12345678e7 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected deleted file mode 100644 index 03b7d7f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1.2345678 - - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json deleted file mode 100644 index 485419a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1.2345678 --12345678e-7 - diff --git 
a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected deleted file mode 100644 index 12025a4..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected +++ /dev/null @@ -1,3 +0,0 @@ -.=-1234567.8 - - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json deleted file mode 100644 index 8013eb5..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json +++ /dev/null @@ -1,3 +0,0 @@ -// -1234567.8 --0.12345678e7 - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected deleted file mode 100644 index 8fd37b1..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json deleted file mode 100644 index c8c059b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected deleted file mode 100644 index 0443bc3..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json 
b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json deleted file mode 100644 index f0fe56a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json +++ /dev/null @@ -1 +0,0 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected deleted file mode 100644 index 6ed627a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json deleted file mode 100644 index f0a220f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json +++ /dev/null @@ -1 +0,0 @@ -"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected deleted file mode 100644 index 447f85a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected +++ /dev/null @@ -1 +0,0 @@ -.="a" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json deleted file mode 100644 index 024114b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json +++ /dev/null @@ -1 +0,0 @@ -"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected deleted file mode 100644 index c0b3b43..0000000 --- 
a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected +++ /dev/null @@ -1 +0,0 @@ -.="¢" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json deleted file mode 100644 index 4961024..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json +++ /dev/null @@ -1 +0,0 @@ -"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected deleted file mode 100644 index 7289743..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected +++ /dev/null @@ -1 +0,0 @@ -.="€" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json deleted file mode 100644 index e7e1a9e..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json +++ /dev/null @@ -1 +0,0 @@ -"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected deleted file mode 100644 index 868fbc3..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected +++ /dev/null @@ -1 +0,0 @@ -.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json deleted file mode 100644 index dae65c5..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json +++ /dev/null @@ -1 +0,0 @@ -"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected deleted file mode 100644 index 19b2c40..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected +++ /dev/null @@ -1,2 +0,0 @@ -.="Zażółć gęślą jaźń" - diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json deleted file mode 100644 index 8770410..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json +++ /dev/null @@ -1 +0,0 @@ -"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py deleted file mode 100644 index 5b215c4..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py +++ /dev/null @@ -1,11 +0,0 @@ -import glob -import os.path -for path in glob.glob( '*.json' ): - text = file(path,'rt').read() - target = os.path.splitext(path)[0] + '.expected' - if os.path.exists( target ): - print 'skipping:', target - else: - print 'creating:', target - file(target,'wt').write(text) - diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json deleted file mode 100644 index 6216b86..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json +++ /dev/null @@ -1 +0,0 @@ -"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json deleted file mode 100644 index 5d8c004..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json deleted file mode 100644 index 76eb95b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json deleted file mode 100644 index 77580a4..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json +++ /dev/null @@ -1 +0,0 @@ -{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json deleted file mode 100644 index 379406b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json deleted file mode 100644 index 0ed366b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json +++ /dev/null @@ -1 +0,0 @@ -{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json deleted file mode 100644 index fc8376b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json deleted file mode 100644 index 3fe21d4..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json +++ /dev/null @@ -1 +0,0 @@ -[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json deleted file mode 100644 index 62b9214..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json +++ /dev/null @@ -1 +0,0 @@ -["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json deleted file mode 100644 index edac927..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json deleted file mode 100644 index 3b9c46f..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json +++ /dev/null @@ -1 +0,0 @@ -{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json deleted file mode 100644 index 6b7c11e..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json +++ /dev/null @@ -1 +0,0 @@ -["Unclosed array" \ No newline at end of file diff --git 
a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json deleted file mode 100644 index 27c1af3..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json +++ /dev/null @@ -1 +0,0 @@ -{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json deleted file mode 100644 index 6247457..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json deleted file mode 100644 index a775258..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json +++ /dev/null @@ -1 +0,0 @@ -["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json deleted file mode 100644 index 494add1..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json +++ /dev/null @@ -1 +0,0 @@ -["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json deleted file mode 100644 index caff239..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json +++ /dev/null @@ -1 +0,0 @@ -['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json deleted file mode 100644 index 8b7ad23..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json +++ /dev/null @@ -1 +0,0 @@ -[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json deleted file mode 100644 index 845d26a..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json +++ /dev/null @@ -1 +0,0 @@ -["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json deleted file mode 100644 index 6b01a2c..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json +++ /dev/null @@ -1,2 +0,0 @@ -["line -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json deleted file mode 100644 index 621a010..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json +++ /dev/null @@ -1,2 +0,0 @@ -["line\ -break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json deleted file mode 100644 index 47ec421..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json +++ /dev/null @@ -1 +0,0 @@ -[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json deleted file mode 100644 index 168c81e..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json +++ /dev/null @@ -1 +0,0 @@ -{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json deleted file mode 100644 index 8ab0bc4..0000000 --- 
a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json +++ /dev/null @@ -1 +0,0 @@ -[0e+] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json deleted file mode 100644 index 1cce602..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json +++ /dev/null @@ -1 +0,0 @@ -[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json deleted file mode 100644 index 45cba73..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json +++ /dev/null @@ -1 +0,0 @@ -{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json deleted file mode 100644 index ca5eb19..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json +++ /dev/null @@ -1 +0,0 @@ -["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json deleted file mode 100644 index 9de168b..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json +++ /dev/null @@ -1 +0,0 @@ -["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json deleted file mode 100644 index ddf3ce3..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json +++ /dev/null @@ -1 +0,0 @@ -["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json deleted file mode 100644 index ed91580..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json +++ /dev/null @@ -1 +0,0 @@ -[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json deleted file mode 100644 index 8a96af3..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json +++ /dev/null @@ -1 +0,0 @@ -["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json deleted file mode 100644 index b28479c..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json +++ /dev/null @@ -1 +0,0 @@ -["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json deleted file mode 100644 index 5815574..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json +++ /dev/null @@ -1 +0,0 @@ -{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json deleted file mode 100644 index 70e2685..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json +++ /dev/null @@ -1,58 +0,0 @@ -[ - "JSON Test Pattern pass1", - {"object with 1 member":["array with 1 element"]}, - {}, - [], - -42, - true, - false, - null, - { - "integer": 1234567890, - "real": -9876.543210, - "e": 0.123456789e-12, - "E": 1.234567890E+34, - "": 23456789012E66, - "zero": 0, - "one": 1, - "space": " ", - "quote": "\"", - "backslash": "\\", - "controls": "\b\f\n\r\t", - "slash": "/ & \/", - "alpha": "abcdefghijklmnopqrstuvwyz", - "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", - 
"digit": "0123456789", - "0123456789": "digit", - "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", - "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", - "true": true, - "false": false, - "null": null, - "array":[ ], - "object":{ }, - "address": "50 St. James Street", - "url": "http://www.JSON.org/", - "comment": "// /* */": " ", - " s p a c e d " :[1,2 , 3 - -, - -4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], - "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", - "quotes": "" \u0022 %22 0x22 034 "", - "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" -: "A key can be any string" - }, - 0.5 ,98.6 -, -99.44 -, - -1066, -1e1, -0.1e1, -1e-1, -1e00,2e+00,2e-00 -,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json deleted file mode 100644 index d3c63c7..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json +++ /dev/null @@ -1 +0,0 @@ -[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json deleted file mode 100644 index 4528d51..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "JSON Test Pattern pass3": { - "The outermost value": "must be an object or array.", - "In this test": "It is an object." - } -} diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt deleted file mode 100644 index 0efc2a4..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt +++ /dev/null @@ -1,3 +0,0 @@ -Test suite from http://json.org/JSON_checker/. - -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py deleted file mode 100644 index 504f3db..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py +++ /dev/null @@ -1,64 +0,0 @@ -# Simple implementation of a json test runner to run the test against json-py. - -import sys -import os.path -import json -import types - -if len(sys.argv) != 2: - print "Usage: %s input-json-file", sys.argv[0] - sys.exit(3) - -input_path = sys.argv[1] -base_path = os.path.splitext(input_path)[0] -actual_path = base_path + '.actual' -rewrite_path = base_path + '.rewrite' -rewrite_actual_path = base_path + '.actual-rewrite' - -def valueTreeToString( fout, value, path = '.' ): - ty = type(value) - if ty is types.DictType: - fout.write( '%s={}\n' % path ) - suffix = path[-1] != '.' and '.' 
or '' - names = value.keys() - names.sort() - for name in names: - valueTreeToString( fout, value[name], path + suffix + name ) - elif ty is types.ListType: - fout.write( '%s=[]\n' % path ) - for index, childValue in zip( xrange(0,len(value)), value ): - valueTreeToString( fout, childValue, path + '[%d]' % index ) - elif ty is types.StringType: - fout.write( '%s="%s"\n' % (path,value) ) - elif ty is types.IntType: - fout.write( '%s=%d\n' % (path,value) ) - elif ty is types.FloatType: - fout.write( '%s=%.16g\n' % (path,value) ) - elif value is True: - fout.write( '%s=true\n' % path ) - elif value is False: - fout.write( '%s=false\n' % path ) - elif value is None: - fout.write( '%s=null\n' % path ) - else: - assert False and "Unexpected value type" - -def parseAndSaveValueTree( input, actual_path ): - root = json.loads( input ) - fout = file( actual_path, 'wt' ) - valueTreeToString( fout, root ) - fout.close() - return root - -def rewriteValueTree( value, rewrite_path ): - rewrite = json.dumps( value ) - #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? - file( rewrite_path, 'wt').write( rewrite + '\n' ) - return rewrite - -input = file( input_path, 'rt' ).read() -root = parseAndSaveValueTree( input, actual_path ) -rewrite = rewriteValueTree( json.write( root ), rewrite_path ) -rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) - -sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py deleted file mode 100644 index ffe8bd5..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys -import os -import os.path -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' - -def compareOutputs( expected, actual, message ): - expected = expected.strip().replace('\r','').split('\n') - actual = actual.strip().replace('\r','').split('\n') - diff_line = 0 - max_line_to_compare = min( len(expected), len(actual) ) - for index in xrange(0,max_line_to_compare): - if expected[index].strip() != actual[index].strip(): - diff_line = index + 1 - break - if diff_line == 0 and len(expected) != len(actual): - diff_line = max_line_to_compare+1 - if diff_line == 0: - return None - def safeGetLine( lines, index ): - index += -1 - if index >= len(lines): - return '' - return lines[index].strip() - return """ Difference in %s at line %d: - Expected: '%s' - Actual: '%s' -""" % (message, diff_line, - safeGetLine(expected,diff_line), - safeGetLine(actual,diff_line) ) - -def safeReadFile( path ): - try: - return file( path, 'rt' ).read() - except IOError, e: - return '' % (path,e) - -def runAllTests( jsontest_executable_path, input_dir = None, - use_valgrind=False, with_json_checker=False ): - if not input_dir: - input_dir = os.path.join( os.getcwd(), 'data' ) - tests = glob( os.path.join( input_dir, '*.json' ) ) - if with_json_checker: - test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) - else: - test_jsonchecker = [] - failed_tests = [] - valgrind_path = use_valgrind and VALGRIND_CMD or '' - for input_path in tests + test_jsonchecker: - expect_failure = os.path.basename( input_path ).startswith( 'fail' ) - is_json_checker_test = (input_path in test_jsonchecker) or expect_failure - print 'TESTING:', input_path, - options = is_json_checker_test and '--json-checker' or '' - pipe = os.popen( "%s%s %s %s" % ( - valgrind_path, jsontest_executable_path, options, - input_path) ) - 
process_output = pipe.read() - status = pipe.close() - if is_json_checker_test: - if expect_failure: - if status is None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % - safeReadFile(input_path)) ) - else: - print 'OK' - else: - if status is not None: - print 'FAILED' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - print 'OK' - else: - base_path = os.path.splitext(input_path)[0] - actual_output = safeReadFile( base_path + '.actual' ) - actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) - file(base_path + '.process-output','wt').write( process_output ) - if status: - print 'parsing failed' - failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) - else: - expected_output_path = os.path.splitext(input_path)[0] + '.expected' - expected_output = file( expected_output_path, 'rt' ).read() - detail = ( compareOutputs( expected_output, actual_output, 'input' ) - or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) - if detail: - print 'FAILED' - failed_tests.append( (input_path, detail) ) - else: - print 'OK' - - if failed_tests: - print - print 'Failure details:' - for failed_test in failed_tests: - print '* Test', failed_test[0] - print failed_test[1] - print - print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), - len(failed_tests) ) - return 1 - else: - print 'All %d tests passed.' % len(tests) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] [test case directory]" ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.add_option("-c", "--with-json-checker", - action="store_true", dest="with_json_checker", default=False, - help="run all the tests from the official JSONChecker test suite of json.org") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) < 1 or len(args) > 2: - parser.error( 'Must provides at least path to jsontestrunner executable.' 
) - sys.exit( 1 ) - - jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) - if len(args) > 1: - input_path = os.path.normpath( os.path.abspath( args[1] ) ) - else: - input_path = None - status = runAllTests( jsontest_executable_path, input_path, - use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) - sys.exit( status ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc2/test/rununittests.py b/tags/jsoncpp/0.6.0-rc2/test/rununittests.py deleted file mode 100644 index ccc54e4..0000000 --- a/tags/jsoncpp/0.6.0-rc2/test/rununittests.py +++ /dev/null @@ -1,73 +0,0 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' 
) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() diff --git a/tags/jsoncpp/0.6.0-rc2/version b/tags/jsoncpp/0.6.0-rc2/version deleted file mode 100644 index 673adfb..0000000 --- a/tags/jsoncpp/0.6.0-rc2/version +++ /dev/null @@ -1 +0,0 @@ -0.6.0-rc2 \ No newline at end of file From 4071cd16c488b6eed0e6661ea921d9a65157013b Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 21:46:58 +0000 Subject: [PATCH 191/268] Release 0.6.0-rc2 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@191 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- tags/jsoncpp/0.6.0-rc2/AUTHORS | 1 + tags/jsoncpp/0.6.0-rc2/LICENSE | 55 + tags/jsoncpp/0.6.0-rc2/NEWS.txt | 101 + tags/jsoncpp/0.6.0-rc2/README.txt | 172 ++ tags/jsoncpp/0.6.0-rc2/SConstruct | 248 ++ tags/jsoncpp/0.6.0-rc2/amalgamate.py | 147 ++ tags/jsoncpp/0.6.0-rc2/devtools/__init__.py | 1 + tags/jsoncpp/0.6.0-rc2/devtools/antglob.py | 201 ++ tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py | 63 + .../0.6.0-rc2/devtools/licenseupdater.py | 93 + tags/jsoncpp/0.6.0-rc2/devtools/tarball.py | 53 + tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in | 1534 ++++++++++++ tags/jsoncpp/0.6.0-rc2/doc/footer.html | 23 + tags/jsoncpp/0.6.0-rc2/doc/header.html | 24 + tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox | 126 + tags/jsoncpp/0.6.0-rc2/doc/readme.txt | 1 + tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox | 37 + tags/jsoncpp/0.6.0-rc2/doxybuild.py | 169 ++ .../jsoncpp/0.6.0-rc2/include/json/autolink.h | 24 + tags/jsoncpp/0.6.0-rc2/include/json/config.h | 96 + .../jsoncpp/0.6.0-rc2/include/json/features.h | 49 + .../jsoncpp/0.6.0-rc2/include/json/forwards.h | 44 + tags/jsoncpp/0.6.0-rc2/include/json/json.h | 15 + tags/jsoncpp/0.6.0-rc2/include/json/reader.h | 214 ++ tags/jsoncpp/0.6.0-rc2/include/json/value.h | 1103 +++++++++ tags/jsoncpp/0.6.0-rc2/include/json/writer.h | 185 ++ .../0.6.0-rc2/makefiles/vs71/jsoncpp.sln | 46 + .../0.6.0-rc2/makefiles/vs71/jsontest.vcproj | 119 + .../0.6.0-rc2/makefiles/vs71/lib_json.vcproj | 214 ++ .../makefiles/vs71/test_lib_json.vcproj | 130 + tags/jsoncpp/0.6.0-rc2/makerelease.py | 380 +++ .../jsoncpp/0.6.0-rc2/scons-tools/globtool.py | 53 + tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py | 179 ++ .../0.6.0-rc2/scons-tools/substinfile.py | 79 + tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py | 82 + .../0.6.0-rc2/src/jsontestrunner/main.cpp | 269 +++ .../0.6.0-rc2/src/jsontestrunner/sconscript | 9 + .../src/lib_json/json_batchallocator.h | 130 + .../src/lib_json/json_internalarray.inl | 456 ++++ .../src/lib_json/json_internalmap.inl | 615 +++++ .../0.6.0-rc2/src/lib_json/json_reader.cpp | 880 +++++++ .../0.6.0-rc2/src/lib_json/json_tool.h | 93 + .../0.6.0-rc2/src/lib_json/json_value.cpp | 1829 ++++++++++++++ .../src/lib_json/json_valueiterator.inl | 299 +++ .../0.6.0-rc2/src/lib_json/json_writer.cpp | 838 +++++++ .../jsoncpp/0.6.0-rc2/src/lib_json/sconscript | 8 + .../0.6.0-rc2/src/test_lib_json/jsontest.cpp | 608 +++++ .../0.6.0-rc2/src/test_lib_json/jsontest.h | 259 ++ .../0.6.0-rc2/src/test_lib_json/main.cpp | 430 ++++ .../0.6.0-rc2/src/test_lib_json/sconscript | 10 + tags/jsoncpp/0.6.0-rc2/test/cleantests.py | 10 + .../test/data/fail_test_array_01.json | 1 + .../test/data/test_array_01.expected | 1 + .../0.6.0-rc2/test/data/test_array_01.json | 1 + .../test/data/test_array_02.expected | 2 + .../0.6.0-rc2/test/data/test_array_02.json | 1 + .../test/data/test_array_03.expected | 6 + .../0.6.0-rc2/test/data/test_array_03.json | 1 + .../test/data/test_array_04.expected | 5 + 
.../0.6.0-rc2/test/data/test_array_04.json | 1 + .../test/data/test_array_05.expected | 100 + .../0.6.0-rc2/test/data/test_array_05.json | 1 + .../test/data/test_array_06.expected | 5 + .../0.6.0-rc2/test/data/test_array_06.json | 4 + .../test/data/test_basic_01.expected | 1 + .../0.6.0-rc2/test/data/test_basic_01.json | 1 + .../test/data/test_basic_02.expected | 1 + .../0.6.0-rc2/test/data/test_basic_02.json | 1 + .../test/data/test_basic_03.expected | 3 + .../0.6.0-rc2/test/data/test_basic_03.json | 3 + .../test/data/test_basic_04.expected | 2 + .../0.6.0-rc2/test/data/test_basic_04.json | 2 + .../test/data/test_basic_05.expected | 2 + .../0.6.0-rc2/test/data/test_basic_05.json | 2 + .../test/data/test_basic_06.expected | 2 + .../0.6.0-rc2/test/data/test_basic_06.json | 2 + .../test/data/test_basic_07.expected | 2 + .../0.6.0-rc2/test/data/test_basic_07.json | 2 + .../test/data/test_basic_08.expected | 2 + .../0.6.0-rc2/test/data/test_basic_08.json | 3 + .../test/data/test_basic_09.expected | 2 + .../0.6.0-rc2/test/data/test_basic_09.json | 4 + .../test/data/test_comment_01.expected | 8 + .../0.6.0-rc2/test/data/test_comment_01.json | 8 + .../test/data/test_complex_01.expected | 20 + .../0.6.0-rc2/test/data/test_complex_01.json | 17 + .../test/data/test_integer_01.expected | 1 + .../0.6.0-rc2/test/data/test_integer_01.json | 2 + .../test/data/test_integer_02.expected | 1 + .../0.6.0-rc2/test/data/test_integer_02.json | 2 + .../test/data/test_integer_03.expected | 1 + .../0.6.0-rc2/test/data/test_integer_03.json | 2 + .../test/data/test_integer_04.expected | 2 + .../0.6.0-rc2/test/data/test_integer_04.json | 3 + .../test/data/test_integer_05.expected | 2 + .../0.6.0-rc2/test/data/test_integer_05.json | 2 + .../test/data/test_integer_06_64bits.expected | 1 + .../test/data/test_integer_06_64bits.json | 2 + .../test/data/test_integer_07_64bits.expected | 1 + .../test/data/test_integer_07_64bits.json | 2 + .../test/data/test_integer_08_64bits.expected | 1 + .../test/data/test_integer_08_64bits.json | 2 + .../test/data/test_large_01.expected | 2122 +++++++++++++++++ .../0.6.0-rc2/test/data/test_large_01.json | 2 + .../test/data/test_object_01.expected | 1 + .../0.6.0-rc2/test/data/test_object_01.json | 1 + .../test/data/test_object_02.expected | 2 + .../0.6.0-rc2/test/data/test_object_02.json | 1 + .../test/data/test_object_03.expected | 4 + .../0.6.0-rc2/test/data/test_object_03.json | 5 + .../test/data/test_object_04.expected | 2 + .../0.6.0-rc2/test/data/test_object_04.json | 3 + .../data/test_preserve_comment_01.expected | 3 + .../test/data/test_preserve_comment_01.json | 14 + .../0.6.0-rc2/test/data/test_real_01.expected | 2 + .../0.6.0-rc2/test/data/test_real_01.json | 3 + .../0.6.0-rc2/test/data/test_real_02.expected | 2 + .../0.6.0-rc2/test/data/test_real_02.json | 3 + .../0.6.0-rc2/test/data/test_real_03.expected | 2 + .../0.6.0-rc2/test/data/test_real_03.json | 3 + .../0.6.0-rc2/test/data/test_real_04.expected | 2 + .../0.6.0-rc2/test/data/test_real_04.json | 3 + .../0.6.0-rc2/test/data/test_real_05.expected | 3 + .../0.6.0-rc2/test/data/test_real_05.json | 3 + .../0.6.0-rc2/test/data/test_real_06.expected | 3 + .../0.6.0-rc2/test/data/test_real_06.json | 3 + .../0.6.0-rc2/test/data/test_real_07.expected | 3 + .../0.6.0-rc2/test/data/test_real_07.json | 3 + .../test/data/test_string_01.expected | 1 + .../0.6.0-rc2/test/data/test_string_01.json | 1 + .../test/data/test_string_02.expected | 1 + .../0.6.0-rc2/test/data/test_string_02.json | 1 + .../test/data/test_string_03.expected | 1 
+ .../0.6.0-rc2/test/data/test_string_03.json | 1 + .../test/data/test_string_unicode_01.expected | 1 + .../test/data/test_string_unicode_01.json | 1 + .../test/data/test_string_unicode_02.expected | 1 + .../test/data/test_string_unicode_02.json | 1 + .../test/data/test_string_unicode_03.expected | 1 + .../test/data/test_string_unicode_03.json | 1 + .../test/data/test_string_unicode_04.expected | 1 + .../test/data/test_string_unicode_04.json | 1 + .../test/data/test_string_unicode_05.expected | 2 + .../test/data/test_string_unicode_05.json | 1 + .../0.6.0-rc2/test/generate_expected.py | 11 + .../0.6.0-rc2/test/jsonchecker/fail1.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail10.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail11.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail12.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail13.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail14.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail15.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail16.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail17.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail18.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail19.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail2.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail20.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail21.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail22.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail23.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail24.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail25.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail26.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail27.json | 2 + .../0.6.0-rc2/test/jsonchecker/fail28.json | 2 + .../0.6.0-rc2/test/jsonchecker/fail29.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail3.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail30.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail31.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail32.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail33.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail4.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail5.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail6.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail7.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail8.json | 1 + .../0.6.0-rc2/test/jsonchecker/fail9.json | 1 + .../0.6.0-rc2/test/jsonchecker/pass1.json | 58 + .../0.6.0-rc2/test/jsonchecker/pass2.json | 1 + .../0.6.0-rc2/test/jsonchecker/pass3.json | 6 + .../0.6.0-rc2/test/jsonchecker/readme.txt | 3 + .../0.6.0-rc2/test/pyjsontestrunner.py | 64 + tags/jsoncpp/0.6.0-rc2/test/runjsontests.py | 134 ++ tags/jsoncpp/0.6.0-rc2/test/rununittests.py | 73 + tags/jsoncpp/0.6.0-rc2/version | 1 + 186 files changed, 15651 insertions(+) create mode 100644 tags/jsoncpp/0.6.0-rc2/AUTHORS create mode 100644 tags/jsoncpp/0.6.0-rc2/LICENSE create mode 100644 tags/jsoncpp/0.6.0-rc2/NEWS.txt create mode 100644 tags/jsoncpp/0.6.0-rc2/README.txt create mode 100644 tags/jsoncpp/0.6.0-rc2/SConstruct create mode 100644 tags/jsoncpp/0.6.0-rc2/amalgamate.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/__init__.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/antglob.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py create mode 100644 tags/jsoncpp/0.6.0-rc2/devtools/tarball.py create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/footer.html create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/header.html create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox create mode 100644 tags/jsoncpp/0.6.0-rc2/doc/readme.txt create 
mode 100644 tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox create mode 100644 tags/jsoncpp/0.6.0-rc2/doxybuild.py create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/autolink.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/config.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/features.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/forwards.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/json.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/reader.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/value.h create mode 100644 tags/jsoncpp/0.6.0-rc2/include/json/writer.h create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj create mode 100644 tags/jsoncpp/0.6.0-rc2/makerelease.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py create mode 100644 tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py create mode 100644 tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp create mode 100644 tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript create mode 100644 tags/jsoncpp/0.6.0-rc2/test/cleantests.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected create 
mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json create mode 100644 
tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected create mode 100644 tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/generate_expected.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json create mode 100644 
tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json create mode 100644 tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt create mode 100644 tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/runjsontests.py create mode 100644 tags/jsoncpp/0.6.0-rc2/test/rununittests.py create mode 100644 tags/jsoncpp/0.6.0-rc2/version diff --git a/tags/jsoncpp/0.6.0-rc2/AUTHORS b/tags/jsoncpp/0.6.0-rc2/AUTHORS new file mode 100644 index 0000000..c0fbbee --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/AUTHORS @@ -0,0 +1 @@ +Baptiste Lepilleur diff --git a/tags/jsoncpp/0.6.0-rc2/LICENSE b/tags/jsoncpp/0.6.0-rc2/LICENSE new file mode 100644 index 0000000..ca2bfe1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/LICENSE @@ -0,0 +1,55 @@ +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. 
+ +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. diff --git a/tags/jsoncpp/0.6.0-rc2/NEWS.txt b/tags/jsoncpp/0.6.0-rc2/NEWS.txt new file mode 100644 index 0000000..e53b880 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/NEWS.txt @@ -0,0 +1,101 @@ + New in JsonCpp 0.6.0: + --------------------- + +* Compilation + + - LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now + propagated to the build environment as this is required for some + compiler installations. + + - Added support for Microsoft Visual Studio 2008 (bug #2930462): + The platform "msvc90" has been added. + + Note: you need to set up the environment by running vcvars32.bat + (e.g. MSVC 2008 command prompt in start menu) before running scons. + + - Added support for amalgamated source and header generation (a la sqlite). + Refer to README.txt section "Generating amalgamated source and header" + for details. + +* Value + + - Removed the experimental ValueAllocator; it caused static + initialization/destruction order issues (bug #2934500). + The DefaultValueAllocator has been inlined in code. + + - Added support for 64-bit integers: + + Types Json::Int64 and Json::UInt64 have been added. They are aliased + to 64-bit integers on systems that support them (based on __int64 on + the Microsoft Visual Studio platform, and long long on other platforms). + + Types Json::LargestInt and Json::LargestUInt have been added. They are + aliased to the largest integer type supported: + either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively. + + Json::Value::asInt() and Json::Value::asUInt() still return plain + "int" based types, but assert if an attempt is made to retrieve + a 64-bit value that cannot be represented as the return type.
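(Editor's illustration, not part of the patch itself.) The 64-bit API described in the NEWS entry above can be exercised roughly as in the sketch below; it assumes the 0.6.0 headers are installed as <json/json.h> and that 64-bit support (JSON_HAS_INT64) is left enabled, which is the default:

    // Minimal sketch of the 64-bit integer API mentioned in the NEWS entry.
    // Assumes jsoncpp 0.6.0 headers; this example is not taken from the patch.
    #include <json/json.h>
    #include <iostream>

    int main()
    {
        Json::Value record;
        // 2^32 does not fit in a 32-bit int, so asInt() would assert here;
        // the value is stored through the new Int64 constructor instead.
        record["id"] = Json::Value( Json::Int64( 4294967296LL ) );

        Json::Int64 id = record["id"].asInt64();                // full 64-bit value
        Json::LargestInt widest = record["id"].asLargestInt();  // largest supported integer type

        std::cout << id << " " << widest << std::endl;
        return 0;
    }

For values that do fit in a plain int, the asInt() accessor and the array-index literal shown further down in this NEWS entry continue to work unchanged.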
+ + Json::Value::asInt64() and Json::Value::asUInt64() have been added + to obtain the 64-bit integer value. + + Json::Value::asLargestInt() and Json::Value::asLargestUInt() return + the integer as a LargestInt/LargestUInt respectively. Those functions + are typically used when implementing a writer. + + The reader attempts to read numbers as 64-bit integers, and falls back + to reading a double if the number is not in the range of a 64-bit + integer. + + Warning: Json::Value::asInt() and Json::Value::asUInt() now return + long long. This change breaks code that was passing the return value + to a *printf() function. + + Support for 64-bit integers can be disabled by defining the macro + JSON_NO_INT64 (uncomment it in json/config.h for example), though + it should have no impact on existing usage. + + - The type Json::ArrayIndex is used for indexes of a JSON value array. It + is an unsigned int (typically 32 bits). + + - Array index can be passed as int to operator[], allowing use of literal: + Json::Value array; + array.append( 1234 ); + int value = array[0].asInt(); // did not compile previously + + - Added float Json::Value::asFloat() to obtain a floating point value as a + float (avoids the loss-of-precision warning caused by use of asDouble() + to initialize a float). + +* Reader + + - Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages. + Bug #3023708 (Formatted has 2 't'). The old member function is deprecated + but still present for backward compatibility. + +* Tests + + - Added test to ensure that the escape sequence "\/" is correctly handled + by the parser. + +* Bug fixes + + - Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. The error is now + correctly detected. + + - Bug #3139678: stack buffer overflow when parsing a double with a + length of 32 characters. + + - Fixed the Value::operator <= implementation (it had the semantic of operator >=). + Found when adding unit tests for the comparison operators. + + - Value::compare() is now const and has an actual implementation with + unit tests. + +* License + + - See file LICENSE for details. Basically JsonCpp is now licensed under the + MIT license, or public domain if desired and recognized in your jurisdiction. + Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/] who + helped figure out the solution to the public domain issue. diff --git a/tags/jsoncpp/0.6.0-rc2/README.txt b/tags/jsoncpp/0.6.0-rc2/README.txt new file mode 100644 index 0000000..51a098a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/README.txt @@ -0,0 +1,172 @@ +* Introduction: + ============= + +JSON (JavaScript Object Notation) is a lightweight data-interchange format. +It can represent integers, real numbers, strings, an ordered sequence of +values, and a collection of name/value pairs. + +JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate +JSON values and handle serialization and unserialization to strings. + +It can also preserve existing comments in unserialization/serialization steps, +making it a convenient format to store user input files. + +Unserialization parsing is user-friendly and provides precise error reports. + + +* Building/Testing: + ================= + +JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires +Python to be installed (http://www.python.org). + +You can download the scons-local distribution from the following URL: +http://sourceforge.net/projects/scons/files/scons-local/1.2.0/ + +Unzip it in the directory where you found this README file.
scons.py should be +at the same level as README. + +python scons.py platform=PLTFRM [TARGET] +where PLTFRM may be one of: + suncc Sun C++ (Solaris) + vacpp Visual Age C++ (AIX) + mingw + msvc6 Microsoft Visual Studio 6 service pack 5-6 + msvc70 Microsoft Visual Studio 2002 + msvc71 Microsoft Visual Studio 2003 + msvc80 Microsoft Visual Studio 2005 + msvc90 Microsoft Visual Studio 2008 + linux-gcc Gnu C++ (linux, also reported to work for Mac OS X) + +Notes: if you are building with Microsoft Visual Studio 2008, you need to +set up the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt) +before running scons. + +Adding a platform is fairly simple. You need to change the SConstruct file +to do so. + +and TARGET may be: + check: build the library and run the unit tests. + + +* Running the tests manually: + ========================== + +Note that tests can be run by scons using the 'check' target (see above). + +You need to run tests manually only if you are troubleshooting an issue. + +In the instructions below, replace "path to jsontest.exe" with the path +of the 'jsontest' executable that was compiled on your platform. + +cd test +# This will run the Reader/Writer tests +python runjsontests.py "path to jsontest.exe" + +# This will run the Reader/Writer tests, using the JSONChecker test suite +# (http://www.json.org/JSON_checker/). +# Notes: not all tests pass: JsonCpp is too lenient (for example, +# it allows an integer to start with '0'). The goal is to improve +# strict mode parsing to get all tests to pass. +python runjsontests.py --with-json-checker "path to jsontest.exe" + +# This will run the unit tests (mostly Value) +python rununittests.py "path to test_lib_json.exe" + +You can run the tests using valgrind: +python rununittests.py --valgrind "path to test_lib_json.exe" + + +* Building the documentation: + =========================== + +Run the Python script doxybuild.py from the top directory: + +python doxybuild.py --open --with-dot + +See doxybuild.py --help for options. + +Note that the documentation is also available for download as a tarball. +The documentation of the latest release is available online at: +http://jsoncpp.sourceforge.net/ + +* Generating amalgamated source and header + ======================================== + +JsonCpp is provided with a script to generate a single header and a single +source file to ease inclusion in an existing project. + +The amalgamated source can be generated at any time by running the following +command from the top directory (requires Python 2.6): + +python amalgamate.py + +It is possible to specify the header name. See the -h option for details. By default, +the following files are generated: +- dist/jsoncpp.cpp: source file that needs to be added to your project. +- dist/json/json.h: corresponding header file for use in your project. It is +equivalent to including json/json.h in non-amalgamated source. This header +only depends on standard headers. +- dist/json/json-forwards.h: header that provides forward declarations +of all JsonCpp types. This is typically what should be included in headers to +speed up compilation. + +The amalgamated sources are generated by concatenating the JsonCpp sources in the +correct order and defining the macro JSON_IS_AMALGAMATION to prevent inclusion +of other headers. + +* Using json-cpp in your project: + =============================== + +include/ should be added to your compiler include path.
jsoncpp headers +should be included as follow: + +#include + + +* Adding a reader/writer test: + ============================ + +To add a test, you need to create two files in test/data: +- a TESTNAME.json file, that contains the input document in JSON format. +- a TESTNAME.expected file, that contains a flatened representation of + the input document. + +TESTNAME.expected file format: +- each line represents a JSON element of the element tree represented + by the input document. +- each line has two parts: the path to access the element separated from + the element value by '='. Array and object values are always empty + (e.g. represented by either [] or {}). +- element path: '.' represented the root element, and is used to separate + object members. [N] is used to specify the value of an array element + at index N. +See test_complex_01.json and test_complex_01.expected to better understand +element path. + + +* Understanding reader/writer test output: + ======================================== + +When a test is run, output files are generated aside the input test files. +Below is a short description of the content of each file: + +- test_complex_01.json: input JSON document +- test_complex_01.expected: flattened JSON element tree used to check if + parsing was corrected. + +- test_complex_01.actual: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.json +- test_complex_01.rewrite: JSON document written by jsontest.exe using the + Json::Value parsed from test_complex_01.json and serialized using + Json::StyledWritter. +- test_complex_01.actual-rewrite: flattened JSON element tree produced by + jsontest.exe from reading test_complex_01.rewrite. +test_complex_01.process-output: jsontest.exe output, typically useful to + understand parsing error. + +* License + ======= + +See file LICENSE for details. Basically JsonCpp is licensed under +MIT license, or public domain if desired and recognized in your jurisdiction. diff --git a/tags/jsoncpp/0.6.0-rc2/SConstruct b/tags/jsoncpp/0.6.0-rc2/SConstruct new file mode 100644 index 0000000..23225cb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/SConstruct @@ -0,0 +1,248 @@ +""" +Notes: +- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time. + +To add a platform: +- add its name in options allowed_values below +- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example. +""" + +import os +import os.path +import sys + +JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip() +DIST_DIR = '#dist' + +options = Variables() +options.Add( EnumVariable('platform', + 'Platform (compiler/stl) used to build the project', + 'msvc71', + allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(), + ignorecase=2) ) + +try: + platform = ARGUMENTS['platform'] + if platform == 'linux-gcc': + CXX = 'g++' # not quite right, but env is not yet available. 
+ import commands + version = commands.getoutput('%s -dumpversion' %CXX) + platform = 'linux-gcc-%s' %version + print "Using platform '%s'" %platform + LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') + LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform) + os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH + print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH +except KeyError: + print 'You must specify a "platform"' + sys.exit(2) + +print "Building using PLATFORM =", platform + +rootbuild_dir = Dir('#buildscons') +build_dir = os.path.join( '#buildscons', platform ) +bin_dir = os.path.join( '#bin', platform ) +lib_dir = os.path.join( '#libs', platform ) +sconsign_dir_path = Dir(build_dir).abspath +sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' ) + +# Ensure build directory exist (SConsignFile fail otherwise!) +if not os.path.exists( sconsign_dir_path ): + os.makedirs( sconsign_dir_path ) + +# Store all dependencies signature in a database +SConsignFile( sconsign_path ) + +def make_environ_vars(): + """Returns a dictionnary with environment variable to use when compiling.""" + # PATH is required to find the compiler + # TEMP is required for at least mingw + # LD_LIBRARY_PATH & co is required on some system for the compiler + vars = {} + for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'): + if name in os.environ: + vars[name] = os.environ[name] + return vars + + +env = Environment( ENV = make_environ_vars(), + toolpath = ['scons-tools'], + tools=[] ) #, tools=['default'] ) + +if platform == 'suncc': + env.Tool( 'sunc++' ) + env.Tool( 'sunlink' ) + env.Tool( 'sunar' ) + env.Append( CCFLAGS = ['-mt'] ) +elif platform == 'vacpp': + env.Tool( 'default' ) + env.Tool( 'aixcc' ) + env['CXX'] = 'xlC_r' #scons does not pick-up the correct one ! + # using xlC_r ensure multi-threading is enabled: + # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm + env.Append( CCFLAGS = '-qrtti=all', + LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning +elif platform == 'msvc6': + env['MSVS_VERSION']='6.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc70': + env['MSVS_VERSION']='7.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc71': + env['MSVS_VERSION']='7.1' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -GX /nologo /MT' +elif platform == 'msvc80': + env['MSVS_VERSION']='8.0' + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'msvc90': + env['MSVS_VERSION']='9.0' + # Scons 1.2 fails to detect the correct location of the platform SDK. + # So we propagate those from the environment. This requires that the + # user run vcvars32.bat before compiling. 
+ if 'INCLUDE' in os.environ: + env['ENV']['INCLUDE'] = os.environ['INCLUDE'] + if 'LIB' in os.environ: + env['ENV']['LIB'] = os.environ['LIB'] + for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']: + env.Tool( tool ) + env['CXXFLAGS']='-GR -EHsc /nologo /MT' +elif platform == 'mingw': + env.Tool( 'mingw' ) + env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] ) +elif platform.startswith('linux-gcc'): + env.Tool( 'default' ) + env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" ) + env['SHARED_LIB_ENABLED'] = True +else: + print "UNSUPPORTED PLATFORM." + env.Exit(1) + +env.Tool('targz') +env.Tool('srcdist') +env.Tool('globtool') + +env.Append( CPPPATH = ['#include'], + LIBPATH = lib_dir ) +short_platform = platform +if short_platform.startswith('msvc'): + short_platform = short_platform[2:] +# Notes: on Windows you need to rebuild the source for each variant +# Build script does not support that yet so we only build static libraries. +# This also fails on AIX because both dynamic and static library ends with +# extension .a. +env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False) +env['LIB_PLATFORM'] = short_platform +env['LIB_LINK_TYPE'] = 'lib' # static +env['LIB_CRUNTIME'] = 'mt' +env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention +env['JSONCPP_VERSION'] = JSONCPP_VERSION +env['BUILD_DIR'] = env.Dir(build_dir) +env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir) +env['DIST_DIR'] = DIST_DIR +if 'TarGz' in env['BUILDERS']: + class SrcDistAdder: + def __init__( self, env ): + self.env = env + def __call__( self, *args, **kw ): + apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw ) + env['SRCDIST_BUILDER'] = env.TarGz +else: # If tarfile module is missing + class SrcDistAdder: + def __init__( self, env ): + pass + def __call__( self, *args, **kw ): + pass +env['SRCDIST_ADD'] = SrcDistAdder( env ) +env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] ) + +env_testing = env.Clone( ) +env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] ) + +def buildJSONExample( env, target_sources, target_name ): + env = env.Clone() + env.Append( CPPPATH = ['#'] ) + exe = env.Program( target=target_name, + source=target_sources ) + env['SRCDIST_ADD']( source=[target_sources] ) + global bin_dir + return env.Install( bin_dir, exe ) + +def buildJSONTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildUnitTests( env, target_sources, target_name ): + jsontests_node = buildJSONExample( env, target_sources, target_name ) + check_alias_target = env.Alias( 'check', jsontests_node, + RunUnitTests( jsontests_node, jsontests_node ) ) + env.AlwaysBuild( check_alias_target ) + +def buildLibrary( env, target_sources, target_name ): + static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + global lib_dir + env.Install( lib_dir, static_lib ) + if env['SHARED_LIB_ENABLED']: + shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}', + source=target_sources ) + env.Install( lib_dir, shared_lib ) + env['SRCDIST_ADD']( source=[target_sources] ) + +Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' ) + +def buildProjectInDirectory( target_directory ): + global build_dir + 
target_build_dir = os.path.join( build_dir, target_directory ) + target = os.path.join( target_directory, 'sconscript' ) + SConscript( target, build_dir=target_build_dir, duplicate=0 ) + env['SRCDIST_ADD']( source=[target] ) + + +def runJSONTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + data_path = os.path.join( jsontest_path, 'data' ) + import runjsontests + return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path ) + +def runJSONTests_string( target, source = None, env = None ): + return 'RunJSONTests("%s")' % source[0] + +import SCons.Action +ActionFactory = SCons.Action.ActionFactory +RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string ) + +def runUnitTests_action( target, source = None, env = None ): + # Add test scripts to python path + jsontest_path = Dir( '#test' ).abspath + sys.path.insert( 0, jsontest_path ) + import rununittests + return rununittests.runAllTests( os.path.abspath(source[0].path) ) + +def runUnitTests_string( target, source = None, env = None ): + return 'RunUnitTests("%s")' % source[0] + +RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string ) + +env.Alias( 'check' ) + +srcdist_cmd = env['SRCDIST_ADD']( source = """ + AUTHORS README.txt SConstruct + """.split() ) +env.Alias( 'src-dist', srcdist_cmd ) + +buildProjectInDirectory( 'src/jsontestrunner' ) +buildProjectInDirectory( 'src/lib_json' ) +buildProjectInDirectory( 'src/test_lib_json' ) +#print env.Dump() + diff --git a/tags/jsoncpp/0.6.0-rc2/amalgamate.py b/tags/jsoncpp/0.6.0-rc2/amalgamate.py new file mode 100644 index 0000000..1476a5f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/amalgamate.py @@ -0,0 +1,147 @@ +"""Amalgate json-cpp library sources into a single source and header file. + +Requires Python 2.6 + +Example of invocation (must be invoked from json-cpp top directory): +python amalgate.py +""" +import os +import os.path +import sys + +class AmalgamationFile: + def __init__( self, top_dir ): + self.top_dir = top_dir + self.blocks = [] + + def add_text( self, text ): + if not text.endswith( '\n' ): + text += '\n' + self.blocks.append( text ) + + def add_file( self, relative_input_path, wrap_in_comment=False ): + def add_marker( prefix ): + self.add_text( '' ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '' ) + add_marker( 'Beginning' ) + f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) + content = f.read() + if wrap_in_comment: + content = '/*\n' + content + '\n*/' + self.add_text( content ) + f.close() + add_marker( 'End' ) + self.add_text( '\n\n\n\n' ) + + def get_value( self ): + return ''.join( self.blocks ).replace('\r\n','\n') + + def write_to( self, output_path ): + output_dir = os.path.dirname( output_path ) + if output_dir and not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + f = open( output_path, 'wb' ) + f.write( self.get_value() ) + f.close() + +def amalgamate_source( source_top_dir=None, + target_source_path=None, + header_include_path=None ): + """Produces amalgated source. + Parameters: + source_top_dir: top-directory + target_source_path: output .cpp path + header_include_path: generated header path relative to target_source_path. + """ + print 'Amalgating header...' 
+ header = AmalgamationFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_file( 'include/json/features.h' ) + header.add_file( 'include/json/value.h' ) + header.add_file( 'include/json/reader.h' ) + header.add_file( 'include/json/writer.h' ) + header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) + + target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) + print 'Writing amalgated header to %r' % target_header_path + header.write_to( target_header_path ) + + base, ext = os.path.splitext( header_include_path ) + forward_header_include_path = base + '-forwards' + ext + print 'Amalgating forward header...' + header = AmalgamationFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) + header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + + target_forward_header_path = os.path.join( os.path.dirname(target_source_path), + forward_header_include_path ) + print 'Writing amalgated forward header to %r' % target_forward_header_path + header.write_to( target_forward_header_path ) + + print 'Amalgating source...' + source = AmalgamationFile( source_top_dir ) + source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' ) + source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + source.add_file( 'LICENSE', wrap_in_comment=True ) + source.add_text( '' ) + source.add_text( '#include <%s>' % header_include_path ) + source.add_text( '' ) + source.add_file( 'src/lib_json\json_tool.h' ) + source.add_file( 'src/lib_json\json_reader.cpp' ) + source.add_file( 'src/lib_json\json_batchallocator.h' ) + source.add_file( 'src/lib_json\json_valueiterator.inl' ) + source.add_file( 'src/lib_json\json_value.cpp' ) + source.add_file( 'src/lib_json\json_writer.cpp' ) + + print 'Writing amalgated source to %r' % target_source_path + source.write_to( target_source_path ) + +def main(): + usage = """%prog [options] +Generate a single amalgated source and header file from the sources. 
+""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp', + help="""Output .cpp source path. [Default: %default]""") + parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h', + help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") + parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(), + help="""Source top-directory. [Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + msg = amalgamate_source( source_top_dir=options.top_dir, + target_source_path=options.target_source_path, + header_include_path=options.header_include_path ) + if msg: + sys.stderr.write( msg + '\n' ) + sys.exit( 1 ) + else: + print 'Source succesfully amalagated' + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py b/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py new file mode 100644 index 0000000..c944e7c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/__init__.py @@ -0,0 +1 @@ +# module \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py b/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py new file mode 100644 index 0000000..bbb6fec --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/antglob.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Baptiste Lepilleur, 2009 + +from dircache import listdir +import re +import fnmatch +import os.path + + +# These fnmatch expressions are used by default to prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS ' + +# These fnmatch expressions are used by default to exclude files and dirs +# while doing the recursive traversal in the glob_impl method of glob function. +##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split() + +# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree +# while doing the recursive traversal in the glob_impl method of glob function. +default_excludes = ''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/_darcs +**/_darcs/** +**/.DS_Store ''' + +DIR = 1 +FILE = 2 +DIR_LINK = 4 +FILE_LINK = 8 +LINKS = DIR_LINK | FILE_LINK +ALL_NO_LINK = DIR | FILE +ALL = DIR | FILE | LINKS + +_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) + +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' + """ + rex = ['^'] + next_pos = 0 + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos + if match.start(0) != next_pos: + raise ValueError( "Invalid ant pattern" ) + if match.group(1): # /**/ + rex.append( sep_rex + '(?:.*%s)?' % sep_rex ) + elif match.group(2): # **/ + rex.append( '(?:.*%s)?' 
% sep_rex ) + elif match.group(3): # /** + rex.append( sep_rex + '.*' ) + elif match.group(4): # * + rex.append( '[^/%s]*' % re.escape(os.path.sep) ) + elif match.group(5): # / + rex.append( sep_rex ) + else: # somepath + rex.append( re.escape(match.group(6)) ) + next_pos = match.end() + rex.append('$') + return re.compile( ''.join( rex ) ) + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l + +def glob(dir_path, + includes = '**/*', + excludes = default_excludes, + entry_type = FILE, + prune_dirs = prune_dirs, + max_depth = 25): + include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + dir_path = dir_path.replace('/',os.path.sep) + entry_type_filter = entry_type + + def is_pruned_dir( dir_name ): + for pattern in prune_dirs: + if fnmatch.fnmatch( dir_name, pattern ): + return True + return False + + def apply_filter( full_path, filter_rexs ): + """Return True if at least one of the filter regular expression match full_path.""" + for rex in filter_rexs: + if rex.match( full_path ): + return True + return False + + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: + dir_path = child_dirs.pop() + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? 
+## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' + return list( glob_impl( dir_path ) ) + + +if __name__ == "__main__": + import unittest + + class AntPatternToRETest(unittest.TestCase): +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) + + unittest.main() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py b/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py new file mode 100644 index 0000000..5d8372d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/fixeol.py @@ -0,0 +1,63 @@ +import os.path + +def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', 
+## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py b/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py new file mode 100644 index 0000000..03e0467 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/licenseupdater.py @@ -0,0 +1,93 @@ +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] 
+Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py b/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py new file mode 100644 index 0000000..182602e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/devtools/tarball.py @@ -0,0 +1,53 @@ +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in b/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in new file mode 100644 index 0000000..48861d2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/doxyfile.in @@ -0,0 +1,1534 @@ +# Doxyfile 1.5.9 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project +# +# All text after a hash (#) is considered a comment and will be ignored +# The format is: +# TAG = value [value, ...] +# For lists items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (" ") + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# http://www.gnu.org/software/libiconv for the list of possible encodings. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded +# by quotes) that should identify the project. + +PROJECT_NAME = "JsonCpp" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. +# This could be handy for archiving the generated documentation or +# if some version control system is used. + +PROJECT_NUMBER = %JSONCPP_VERSION% + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) +# base path where the generated documentation will be put. +# If a relative path is entered, it will be relative to the location +# where doxygen was started. If left blank the current directory will be used. + +OUTPUT_DIRECTORY = %DOC_TOPDIR% + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create +# 4096 sub-directories (in 2 levels) under the output directory of each output +# format and will distribute the generated files over these directories. +# Enabling this option can be useful when feeding doxygen a huge amount of +# source files, where putting all generated files in the same directory would +# otherwise cause performance problems for the file system. + +CREATE_SUBDIRS = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# The default language is English, other supported languages are: +# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, +# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, +# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English +# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, +# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, +# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will +# include brief member descriptions after the members that are listed in +# the file and class documentation (similar to JavaDoc). +# Set to NO to disable this. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend +# the brief description of a member or function before the detailed description. +# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator +# that is used to form the text in various listings. Each string +# in this list, if found as the leading text of the brief description, will be +# stripped from the text and the result after processing the whole list, is +# used as the annotated text. Otherwise, the brief description is used as-is. +# If left blank, the following values are used ("$name" is automatically +# replaced with the name of the entity): "The $name class" "The $name widget" +# "The $name file" "is" "provides" "specifies" "contains" +# "represents" "a" "an" "the" + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# Doxygen will generate a detailed section even if there is only a brief +# description. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full +# path before files name in the file list and in the header files. If set +# to NO the shortest path that makes the file name unique will be used. + +FULL_PATH_NAMES = YES + +# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag +# can be used to strip a user-defined part of the path. Stripping is +# only done if one of the specified strings matches the left-hand part of +# the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the +# path to strip. + +STRIP_FROM_PATH = %TOPDIR% + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of +# the path mentioned in the documentation of a class, which tells +# the reader which header file to include in order to use a class. +# If left blank only the name of the header file containing the class +# definition is used. Otherwise one should specify the include paths that +# are normally passed to the compiler using the -I flag. + +STRIP_FROM_INC_PATH = %TOPDIR%/include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter +# (but less readable) file names. This can be useful is your file systems +# doesn't support long names like on DOS, Mac, or CD-ROM. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen +# will interpret the first line (until the first dot) of a JavaDoc-style +# comment as the brief description. If set to NO, the JavaDoc +# comments will behave just like regular Qt-style comments +# (thus requiring an explicit @brief command for a brief description.) + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then Doxygen will +# interpret the first line (until the first dot) of a Qt-style +# comment as the brief description. 
If set to NO, the comments +# will behave just like regular Qt-style comments (thus requiring +# an explicit \brief command for a brief description.) + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen +# treat a multi-line C++ special comment block (i.e. a block of //! or /// +# comments) as a brief description. This used to be the default behaviour. +# The new default is to treat a multi-line C++ comment block as a detailed +# description. Set this tag to YES if you prefer the old behaviour instead. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented +# member inherits the documentation from any documented member that it +# re-implements. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce +# a new page for each member. If set to NO, the documentation of a member will +# be part of the file/class/namespace that contains it. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. +# Doxygen uses this value to replace tabs by spaces in code fragments. + +TAB_SIZE = 3 + +# This tag can be used to specify a number of aliases that acts +# as commands in the documentation. An alias has the form "name=value". +# For example adding "sideeffect=\par Side Effects:\n" will allow you to +# put the command \sideeffect (or @sideeffect) in the documentation, which +# will result in a user-defined paragraph with heading "Side Effects:". +# You can put \n's in the value part of an alias to insert newlines. + +ALIASES = "testCaseSetup=\link CppUT::TestCase::setUp() setUp()\endlink" \ + "testCaseRun=\link CppUT::TestCase::run() run()\endlink" \ + "testCaseTearDown=\link CppUT::TestCase::tearDown() tearDown()\endlink" \ + "json_ref=JSON (JavaScript Object Notation)" + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C +# sources only. Doxygen will then generate output that is more tailored for C. +# For instance, some of the names that are used will be different. The list +# of all members will be omitted, etc. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java +# sources only. Doxygen will then generate output that is more tailored for +# Java. For instance, namespaces will be presented as packages, qualified +# scopes will look different, etc. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources only. Doxygen will then generate output that is more tailored for +# Fortran. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for +# VHDL. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it parses. +# With this tag you can assign which parser to use for a given extension. +# Doxygen has a built-in mapping, but you can override or extend it using this tag. +# The format is ext=language, where ext is a file extension, and language is one of +# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, +# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat +# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. 
Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should +# set this tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. +# func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. +# Doxygen will parse them like normal C++ but will assume all classes use public +# instead of private inheritance when no explicit protection keyword is present. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate getter +# and setter methods for a property. Setting this option to YES (the default) +# will make doxygen to replace the get and set methods by a property in the +# documentation. This will only work if the methods are indeed getting or +# setting a simple type. If this is not the case, or you want to show the +# methods anyway, you should set this option to NO. + +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES (the default) to allow class member groups of +# the same type (for instance a group of public functions) to be put as a +# subgroup of that type (e.g. under the Public Functions section). Set it to +# NO to prevent subgrouping. Alternatively, this can be done per class using +# the \nosubgrouping command. + +SUBGROUPING = YES + +# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum +# is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically +# be useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. + +TYPEDEF_HIDES_STRUCT = NO + +# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to +# determine which symbols to keep in memory and which to flush to disk. +# When the cache is full, less often used symbols will be written to disk. +# For small to medium size projects (<1000 input files) the default value is +# probably good enough. For larger projects a too small cache size can cause +# doxygen to be busy swapping symbols to and from disk most of the time +# causing a significant performance penality. +# If the system has enough physical memory increasing the cache will improve the +# performance by keeping more symbols in memory. Note that the value works on +# a logarithmic scale so increasing the size by one will rougly double the +# memory usage. 
The cache size is given by this formula: +# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, +# corresponding to a cache size of 2^16 = 65536 symbols + +SYMBOL_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. +# Private class members and static file members will be hidden unless +# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class +# will be included in the documentation. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file +# will be included in the documentation. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) +# defined locally in source files will be included in the documentation. +# If set to NO only classes defined in header files are included. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. When set to YES local +# methods, which are defined in the implementation section but not in +# the interface are included in the documentation. +# If set to NO (the default) only methods in the interface are included. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base +# name of the file that contains the anonymous namespace. By default +# anonymous namespace are hidden. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all +# undocumented members of documented classes, files or namespaces. +# If set to NO (the default) these members will be included in the +# various overviews, but no documentation section is generated. +# This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. +# If set to NO (the default) these classes will be included in the various +# overviews. This option has no effect if EXTRACT_ALL is enabled. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all +# friend (class|struct|union) declarations. +# If set to NO (the default) these declarations will be included in the +# documentation. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any +# documentation blocks found inside the body of a function. +# If set to NO (the default) these blocks will be appended to the +# function's detailed documentation block. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation +# that is typed after a \internal command is included. If the tag is set +# to NO (the default) then the documentation will be excluded. +# Set it to YES to include the internal documentation. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate +# file names in lower-case letters. If set to YES upper-case letters are also +# allowed. 
This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen +# will show members with their full class and namespace scopes in the +# documentation. If set to YES the scope will be hidden. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen +# will put a list of the files that are included by a file in the documentation +# of that file. + +SHOW_INCLUDE_FILES = YES + +# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] +# is inserted in the documentation for inline members. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen +# will sort the (detailed) documentation of file and class members +# alphabetically by member name. If set to NO the members will appear in +# declaration order. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the +# brief documentation of file, namespace and class members alphabetically +# by member name. If set to NO (the default) the members will appear in +# declaration order. + +SORT_BRIEF_DOCS = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the +# hierarchy of group names into alphabetical order. If set to NO (the default) +# the group names will appear in their defined order. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be +# sorted by fully-qualified names, including namespaces. If set to +# NO (the default), the class list will be sorted only by class name, +# not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the +# alphabetical list. + +SORT_BY_SCOPE_NAME = YES + +# The GENERATE_TODOLIST tag can be used to enable (YES) or +# disable (NO) the todo list. This list is created by putting \todo +# commands in the documentation. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or +# disable (NO) the test list. This list is created by putting \test +# commands in the documentation. + +GENERATE_TESTLIST = NO + +# The GENERATE_BUGLIST tag can be used to enable (YES) or +# disable (NO) the bug list. This list is created by putting \bug +# commands in the documentation. + +GENERATE_BUGLIST = NO + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or +# disable (NO) the deprecated list. This list is created by putting +# \deprecated commands in the documentation. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional +# documentation sections, marked by \if sectionname ... \endif. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines +# the initial value of a variable or define consists of for it to appear in +# the documentation. If the initializer consists of more lines than specified +# here it will be hidden. Use a value of 0 to hide initializers completely. +# The appearance of the initializer of individual variables and defines in the +# documentation can be controlled using \showinitializer or \hideinitializer +# command in the documentation regardless of this setting. 
+ +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated +# at the bottom of the documentation of classes and structs. If set to YES the +# list will mention the files that were used to generate the documentation. + +SHOW_USED_FILES = YES + +# If the sources in your project are distributed over multiple directories +# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy +# in the documentation. The default is NO. + +SHOW_DIRECTORIES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. +# This will remove the Files entry from the Quick Index and from the +# Folder Tree View (if specified). The default is YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the +# Namespaces page. +# This will remove the Namespaces entry from the Quick Index +# and from the Folder Tree View (if specified). The default is YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command , where is the value of +# the FILE_VERSION_FILTER tag, and is the name of an input file +# provided by doxygen. Whatever the program writes to standard output +# is used as the file version. See the manual for examples. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by +# doxygen. The layout file controls the global structure of the generated output files +# in an output format independent way. The create the layout file that represents +# doxygen's defaults, run doxygen with the -l option. You can optionally specify a +# file name after the option, if omitted DoxygenLayout.xml will be used as the name +# of the layout file. + +LAYOUT_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated +# by doxygen. Possible values are YES and NO. If left blank NO is used. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated by doxygen. Possible values are YES and NO. If left blank +# NO is used. + +WARNINGS = YES + +# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings +# for undocumented members. If EXTRACT_ALL is set to YES then this flag will +# automatically be disabled. + +WARN_IF_UNDOCUMENTED = YES + +# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some +# parameters in a documented function, or documenting parameters that +# don't exist or using markup commands wrongly. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be abled to get warnings for +# functions that are documented, but have no documentation for their parameters +# or return value. If set to NO (the default) doxygen will only warn about +# wrong or incomplete parameter documentation, but not about the absence of +# documentation. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that +# doxygen can produce. 
The string should contain the $file, $line, and $text +# tags, which will be replaced by the file and line number from which the +# warning originated and the warning text. Optionally the format may contain +# $version, which will be replaced by the version of the file (if it could +# be obtained via FILE_VERSION_FILTER) + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning +# and error messages should be written. If left blank the output is written +# to stderr. + +WARN_LOGFILE = %WARNING_LOG_PATH% + +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag can be used to specify the files and/or directories that contain +# documented source files. You may enter file names like "myfile.cpp" or +# directories like "/usr/src/myproject". Separate the files or directories +# with spaces. + +INPUT = ../include ../src/lib_json . + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is +# also the default input encoding. Doxygen uses libiconv (or the iconv built +# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for +# the list of possible encodings. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank the following patterns are tested: +# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx +# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 + +FILE_PATTERNS = *.h \ + *.cpp \ + *.inl \ + *.dox + +# The RECURSIVE tag can be used to turn specify whether or not subdirectories +# should be searched for input files as well. Possible values are YES and NO. +# If left blank NO is used. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used select whether or not files or +# directories that are symbolic links (a Unix filesystem feature) are excluded +# from the input. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. Note that the wildcards are matched +# against the file with absolute path, so to exclude all test directories +# for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or +# directories that contain example code fragments that are included (see +# the \include command). 
+ +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp +# and *.h) to filter out the source-files in the directories. If left +# blank all files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude +# commands irrespective of the value of the RECURSIVE tag. +# Possible values are YES and NO. If left blank NO is used. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or +# directories that contain image that are included in the documentation (see +# the \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command , where +# is the value of the INPUT_FILTER tag, and is the name of an +# input file. Doxygen will then use the output that the filter program writes +# to standard output. +# If FILTER_PATTERNS is specified, this tag will be +# ignored. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. +# Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. +# The filters are a list of the form: +# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further +# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER +# is applied to all files. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will be used to filter the input files when producing source +# files to browse (i.e. when SOURCE_BROWSER is set to YES). + +FILTER_SOURCE_FILES = NO + +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will +# be generated. Documented entities will be cross-referenced with these sources. +# Note: To get rid of all source code in the generated output, make sure also +# VERBATIM_HEADERS is set to NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body +# of functions and classes directly in the documentation. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct +# doxygen to hide any special comment blocks from generated source code +# fragments. Normal C and C++ comments will always remain visible. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES +# then for each documented function all documented +# functions referencing it will be listed. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES +# then for each documented function all documented entities +# called/used by that function will be listed. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) +# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from +# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will +# link to the source code. +# Otherwise they will link to the documentation. 
+ +REFERENCES_LINK_SOURCE = YES + +# If the USE_HTAGS tag is set to YES then the references to source code +# will point to the HTML generated by the htags(1) tool instead of doxygen +# built-in source browser. The htags tool is part of GNU's global source +# tagging system (see http://www.gnu.org/software/global/global.html). You +# will need version 4.8.6 or higher. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen +# will generate a verbatim copy of the header file for each class for +# which an include is specified. Set to NO to disable this. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index +# of all compounds will be generated. Enable this if the project +# contains a lot of classes, structs, unions or interfaces. + +ALPHABETICAL_INDEX = NO + +# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then +# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns +# in which this list will be split (can be a number in the range [1..20]) + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all +# classes will be put under the same header in the alphabetical index. +# The IGNORE_PREFIX tag can be used to specify one or more prefixes that +# should be ignored while generating the index headers. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES (the default) Doxygen will +# generate HTML output. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `html' will be used as the default path. + +HTML_OUTPUT = %HTML_OUTPUT% + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for +# each generated HTML page (for example: .htm,.php,.asp). If it is left blank +# doxygen will generate files with .html extension. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a personal HTML header for +# each generated HTML page. If it is left blank doxygen will generate a +# standard header. + +HTML_HEADER = header.html + +# The HTML_FOOTER tag can be used to specify a personal HTML footer for +# each generated HTML page. If it is left blank doxygen will generate a +# standard footer. + +HTML_FOOTER = footer.html + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading +# style sheet that is used by each HTML page. It can be used to +# fine-tune the look of the HTML output. If the tag is left blank doxygen +# will generate a default style sheet. Note that doxygen will try to copy +# the style sheet file to the HTML output directory, so don't put your own +# stylesheet in the HTML output directory as well, or it will be erased! + +HTML_STYLESHEET = + +# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, +# files or namespaces will be aligned in HTML using tables. If set to +# NO a bullet list will be used. 
+ +HTML_ALIGN_MEMBERS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. For this to work a browser that supports +# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox +# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). + +HTML_DYNAMIC_SECTIONS = YES + +# If the GENERATE_DOCSET tag is set to YES, additional index files +# will be generated that can be used as input for Apple's Xcode 3 +# integrated development environment, introduced with OSX 10.5 (Leopard). +# To create a documentation set, doxygen will generate a Makefile in the +# HTML output directory. Running make will produce the docset in that +# directory and running "make install" will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find +# it at startup. +# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. + +GENERATE_DOCSET = NO + +# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the +# feed. A documentation feed provides an umbrella under which multiple +# documentation sets from a single provider (such as a company or product suite) +# can be grouped. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that +# should uniquely identify the documentation set bundle. This should be a +# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen +# will append .docset to the name. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# If the GENERATE_HTMLHELP tag is set to YES, additional index files +# will be generated that can be used as input for tools like the +# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) +# of the generated HTML documentation. + +GENERATE_HTMLHELP = %HTML_HELP% + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can +# be used to specify the file name of the resulting .chm file. You +# can add a path in front of the file if the result should not be +# written to the html output directory. + +CHM_FILE = jsoncpp-%JSONCPP_VERSION%.chm + +# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can +# be used to specify the location (absolute path including file name) of +# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run +# the HTML help compiler on the generated index.hhp. + +HHC_LOCATION = "c:\Program Files\HTML Help Workshop\hhc.exe" + +# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag +# controls if a separate .chi index file is generated (YES) or that +# it should be included in the master .chm file (NO). + +GENERATE_CHI = YES + +# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING +# is used to encode HtmlHelp index (hhk), content (hhc) and project file +# content. + +CHM_INDEX_ENCODING = + +# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag +# controls whether a binary table of contents is generated (YES) or a +# normal table of contents (NO) in the .chm file. + +BINARY_TOC = YES + +# The TOC_EXPAND flag can be set to YES to add extra items for group members +# to the contents of the HTML help documentation and to the tree view. 
+ +TOC_EXPAND = YES + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER +# are set, an additional index file will be generated that can be used as input for +# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated +# HTML documentation. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can +# be used to specify the file name of the resulting .qch file. +# The path specified is relative to the HTML output folder. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#namespace + +QHP_NAMESPACE = + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating +# Qt Help Project output. For more information please see +# http://doc.trolltech.com/qthelpproject.html#virtual-folders + +QHP_VIRTUAL_FOLDER = doc + +# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. +# For more information please see +# http://doc.trolltech.com/qthelpproject.html#custom-filters + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see +# Qt Help Project / Custom Filters. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's +# filter section matches. +# Qt Help Project / Filter Attributes. + +QHP_SECT_FILTER_ATTRS = + +# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can +# be used to specify the location of Qt's qhelpgenerator. +# If non-empty doxygen will try to run qhelpgenerator on the generated +# .qhp file. + +QHG_LOCATION = + +# The DISABLE_INDEX tag can be used to turn on/off the condensed index at +# top of each HTML page. The value NO (the default) enables the index and +# the value YES disables it. + +DISABLE_INDEX = NO + +# This tag can be used to set the number of enum values (range [1..20]) +# that doxygen will group on one line in the generated HTML documentation. + +ENUM_VALUES_PER_LINE = 4 + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. +# If the tag value is set to FRAME, a side panel will be generated +# containing a tree-like index structure (just like the one that +# is generated for HTML Help). For this to work a browser that supports +# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+, +# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are +# probably better off using the HTML help feature. Other possible values +# for this tag are: HIERARCHIES, which will generate the Groups, Directories, +# and Class Hierarchy pages using a tree view instead of an ordered list; +# ALL, which combines the behavior of FRAME and HIERARCHIES; and NONE, which +# disables this behavior completely. For backwards compatibility with previous +# releases of Doxygen, the values YES and NO are equivalent to FRAME and NONE +# respectively. + +GENERATE_TREEVIEW = NO + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be +# used to set the initial width (in pixels) of the frame in which the tree +# is shown. + +TREEVIEW_WIDTH = 250 + +# Use this tag to change the font size of Latex formulas included +# as images in the HTML documentation. The default is 10. 
Note that +# when you change the font size after a successful doxygen run you need +# to manually remove any form_*.png images from the HTML output directory +# to force them to be regenerated. + +FORMULA_FONTSIZE = 10 + +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will +# generate Latex output. + +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `latex' will be used as the default path. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. If left blank `latex' will be used as the default command name. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to +# generate index for LaTeX. If left blank `makeindex' will be used as the +# default command name. + +MAKEINDEX_CMD_NAME = makeindex + +# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact +# LaTeX documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_LATEX = NO + +# The PAPER_TYPE tag can be used to set the paper type that is used +# by the printer. Possible values are: a4, a4wide, letter, legal and +# executive. If left blank a4wide will be used. + +PAPER_TYPE = a4wide + +# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX +# packages that should be included in the LaTeX output. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for +# the generated latex document. The header should contain everything until +# the first chapter. If it is left blank doxygen will generate a +# standard header. Notice: only use this tag if you know what you are doing! + +LATEX_HEADER = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated +# is prepared for conversion to pdf (using ps2pdf). The pdf file will +# contain links (just like the HTML output) instead of page references +# This makes the output suitable for online browsing using a pdf viewer. + +PDF_HYPERLINKS = NO + +# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of +# plain latex in the generated Makefile. Set this option to YES to get a +# higher quality PDF documentation. + +USE_PDFLATEX = NO + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. +# command to the generated LaTeX files. This will instruct LaTeX to keep +# running if errors occur, instead of asking the user for help. +# This option is also used when generating formulas in HTML. + +LATEX_BATCHMODE = NO + +# If LATEX_HIDE_INDICES is set to YES then doxygen will not +# include the index chapters (such as File Index, Compound Index, etc.) +# in the output. + +LATEX_HIDE_INDICES = NO + +# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. 
+ +LATEX_SOURCE_CODE = NO + +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output +# The RTF output is optimized for Word 97 and may not look very pretty with +# other RTF readers or editors. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `rtf' will be used as the default path. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES Doxygen generates more compact +# RTF documents. This may be useful for small projects and may help to +# save some trees in general. + +COMPACT_RTF = NO + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated +# will contain hyperlink fields. The RTF file will +# contain links (just like the HTML output) instead of page references. +# This makes the output suitable for online browsing using WORD or other +# programs which support those fields. +# Note: wordpad (write) and others do not support links. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# config file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. + +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an rtf document. +# Syntax is similar to doxygen's config file. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES (the default) Doxygen will +# generate man pages + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `man' will be used as the default path. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to +# the generated man pages (default is the subroutine's section .3) + +MAN_EXTENSION = .3 + +# If the MAN_LINKS tag is set to YES and Doxygen generates man output, +# then it will generate one additional man file for each entity +# documented in the real man page(s). These additional files +# only source the real man page, but without them the man command +# would be unable to find the correct page. The default is NO. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES Doxygen will +# generate an XML file that captures the structure of +# the code including all documentation. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be +# put in front of it. If left blank `xml' will be used as the default path. + +XML_OUTPUT = xml + +# The XML_SCHEMA tag can be used to specify an XML schema, +# which can be used by a validating XML parser to check the +# syntax of the XML files. 
+ +XML_SCHEMA = + +# The XML_DTD tag can be used to specify an XML DTD, +# which can be used by a validating XML parser to check the +# syntax of the XML files. + +XML_DTD = + +# If the XML_PROGRAMLISTING tag is set to YES Doxygen will +# dump the program listings (including syntax highlighting +# and cross-referencing information) to the XML output. Note that +# enabling this will significantly increase the size of the XML output. + +XML_PROGRAMLISTING = YES + +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will +# generate an AutoGen Definitions (see autogen.sf.net) file +# that captures the structure of the code including all +# documentation. Note that this feature is still experimental +# and incomplete at the moment. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES Doxygen will +# generate a Perl module file that captures the structure of +# the code including all documentation. Note that this +# feature is still experimental and incomplete at the +# moment. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES Doxygen will generate +# the necessary Makefile rules, Perl scripts and LaTeX code to be able +# to generate PDF and DVI output from the Perl module output. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be +# nicely formatted so it can be parsed by a human reader. +# This is useful +# if you want to understand what is going on. +# On the other hand, if this +# tag is set to NO the size of the Perl module output will be much smaller +# and Perl will parse it just the same. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file +# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. +# This is useful so different doxyrules.make files included by the same +# Makefile don't overwrite each other's variables. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will +# evaluate all C-preprocessor directives found in the sources and include +# files. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro +# names in the source code. If set to NO (the default) only conditional +# compilation will be performed. Macro expansion can be done in a controlled +# way by setting EXPAND_ONLY_PREDEF to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES +# then the macro expansion is limited to the macros specified with the +# PREDEFINED and EXPAND_AS_DEFINED tags. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files +# in the INCLUDE_PATH (see below) will be search if a #include is found. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by +# the preprocessor. + +INCLUDE_PATH = ../include + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will +# be used. + +INCLUDE_FILE_PATTERNS = *.h + +# The PREDEFINED tag can be used to specify one or more macro names that +# are defined before the preprocessor is started (similar to the -D option of +# gcc). The argument of the tag is a list of macros of the form: name +# or name=definition (no spaces). If the definition and the = are +# omitted =1 is assumed. To prevent a macro definition from being +# undefined via #undef or recursively expanded use the := operator +# instead of the = operator. + +PREDEFINED = "_MSC_VER=1400" \ + _CPPRTTI \ + _WIN32 \ + JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \ + JSON_VALUE_USE_INTERNAL_MAP + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then +# this tag can be used to specify a list of macro names that should be expanded. +# The macro definition that is found in the sources will be used. +# Use the PREDEFINED tag if you want to use a different macro definition. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then +# doxygen's preprocessor will remove all function-like macros that are alone +# on a line, have an all uppercase name, and do not end with a semicolon. Such +# function macros are typically used for boiler-plate code, and will confuse +# the parser if not removed. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES option can be used to specify one or more tagfiles. +# Optionally an initial location of the external documentation +# can be added for each tagfile. The format of a tag file without +# this location is as follows: +# +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where "loc1" and "loc2" can be relative or absolute paths or +# URLs. If a location is present for each tag, the installdox tool +# does not have to be run to correct the links. +# Note that each tag file must have a unique name +# (where the name does NOT include the path) +# If a tag file is not located in the directory in which doxygen +# is run, you must also specify the path to the tagfile here. + +TAGFILES = + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create +# a tag file that is based on the input files it reads. + +GENERATE_TAGFILE = + +# If the ALLEXTERNALS tag is set to YES all external classes will be listed +# in the class index. If set to NO only the inherited external classes +# will be listed. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will +# be listed. + +EXTERNAL_GROUPS = YES + +# The PERL_PATH should be the absolute path and name of the perl script +# interpreter (i.e. the result of `which perl'). 
+ +PERL_PATH = /usr/bin/perl + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will +# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base +# or super classes. Setting the tag to NO turns the diagrams off. Note that +# this option is superseded by the HAVE_DOT option below. This is only a +# fallback. It is recommended to install and use dot, since it yields more +# powerful graphs. + +CLASS_DIAGRAMS = NO + +# You can define message sequence charts within doxygen comments using the \msc +# command. Doxygen will then run the mscgen tool (see +# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the +# documentation. The MSCGEN_PATH tag allows you to specify the directory where +# the mscgen tool resides. If left empty the tool is assumed to be found in the +# default search path. + +MSCGEN_PATH = + +# If set to YES, the inheritance and collaboration graphs will hide +# inheritance and usage relations if the target is undocumented +# or is not a class. + +HIDE_UNDOC_RELATIONS = NO + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz, a graph visualization +# toolkit from AT&T and Lucent Bell Labs. The other options in this section +# have no effect if this option is set to NO (the default) + +HAVE_DOT = %HAVE_DOT% + +# By default doxygen will write a font called FreeSans.ttf to the output +# directory and reference it in all dot files that doxygen generates. This +# font does not include all possible unicode characters however, so when you need +# these (or just want a differently looking font) you can specify the font name +# using DOT_FONTNAME. You need need to make sure dot is able to find the font, +# which can be done by putting it in a standard location or by setting the +# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory +# containing the font. + +DOT_FONTNAME = FreeSans + +# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. +# The default size is 10pt. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the output directory to look for the +# FreeSans.ttf font (which doxygen will put there itself). If you specify a +# different font using DOT_FONTNAME you can set the path where dot +# can find it using this tag. + +DOT_FONTPATH = + +# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect inheritance relations. Setting this tag to YES will force the +# the CLASS_DIAGRAMS tag to NO. + +CLASS_GRAPH = YES + +# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for each documented class showing the direct and +# indirect implementation dependencies (inheritance, containment, and +# class references variables) of the class with other documented classes. + +COLLABORATION_GRAPH = YES + +# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen +# will generate a graph for groups, showing the direct groups dependencies + +GROUP_GRAPHS = YES + +# If the UML_LOOK tag is set to YES doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. 
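+# Note: values written as %NAME% in this file (for example %UML_LOOK%, %HAVE_DOT%,
+# %HTML_OUTPUT% and %WARNING_LOG_PATH%) are placeholders rather than literal
+# settings; the doxybuild.py script further down appears to substitute them with
+# concrete values before doxygen is run.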
+ +UML_LOOK = %UML_LOOK% + +# If set to YES, the inheritance and collaboration graphs will show the +# relations between templates and their instances. + +TEMPLATE_RELATIONS = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT +# tags are set to YES then doxygen will generate a graph for each documented +# file showing the direct and indirect include dependencies of the file with +# other documented files. + +INCLUDE_GRAPH = YES + +# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and +# HAVE_DOT tags are set to YES then doxygen will generate a graph for each +# documented header file showing the documented files that directly or +# indirectly include this file. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH and HAVE_DOT options are set to YES then +# doxygen will generate a call dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable call graphs +# for selected functions only using the \callgraph command. + +CALL_GRAPH = NO + +# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then +# doxygen will generate a caller dependency graph for every global function +# or class method. Note that enabling this option will significantly increase +# the time of a run. So in most cases it will be better to enable caller +# graphs for selected functions only using the \callergraph command. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen +# will graphical hierarchy of all classes instead of a textual one. + +GRAPHICAL_HIERARCHY = YES + +# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES +# then doxygen will show the dependencies a directory has on other directories +# in a graphical way. The dependency relations are determined by the #include +# relations between the files in the directories. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. Possible values are png, jpg, or gif +# If left blank png will be used. + +DOT_IMAGE_FORMAT = png + +# The tag DOT_PATH can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. + +DOT_PATH = %DOT_PATH% + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the +# \dotfile command). + +DOTFILE_DIRS = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of +# nodes that will be shown in the graph. If the number of nodes in a graph +# becomes larger than this value, doxygen will truncate the graph, which is +# visualized by representing a node as a red box. Note that doxygen if the +# number of direct children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note +# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. + +DOT_GRAPH_MAX_NODES = 50 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the +# graphs generated by dot. A depth value of 3 means that only nodes reachable +# from the root by following a path via at most 3 edges will be shown. Nodes +# that lay further from the root node will be omitted. Note that setting this +# option to 1 or 2 may greatly reduce the computation time needed for large +# code bases. 
Also note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. + +MAX_DOT_GRAPH_DEPTH = 1000 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not +# seem to support this out of the box. Warning: Depending on the platform used, +# enabling this option may lead to badly anti-aliased labels on the edges of +# a graph (i.e. they become hard to read). + +DOT_TRANSPARENT = NO + +# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output +# files in one run (i.e. multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) +# support this, this feature is disabled by default. + +DOT_MULTI_TARGETS = YES + +# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will +# generate a legend page explaining the meaning of the various boxes and +# arrows in the dot generated graphs. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will +# remove the intermediate dot files that are used to generate +# the various graphs. + +DOT_CLEANUP = YES + +#--------------------------------------------------------------------------- +# Options related to the search engine +#--------------------------------------------------------------------------- + +# The SEARCHENGINE tag specifies whether or not a search engine should be +# used. If set to NO the values of all tags below this one will be ignored. + +SEARCHENGINE = NO diff --git a/tags/jsoncpp/0.6.0-rc2/doc/footer.html b/tags/jsoncpp/0.6.0-rc2/doc/footer.html new file mode 100644 index 0000000..a61d952 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/footer.html @@ -0,0 +1,23 @@ +
+ + + + + + + +
+ + SourceForge Logo + + hosts this site. + + + Send comments to:
+ Json-cpp Developers +
+ + + diff --git a/tags/jsoncpp/0.6.0-rc2/doc/header.html b/tags/jsoncpp/0.6.0-rc2/doc/header.html new file mode 100644 index 0000000..1a6ad61 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/header.html @@ -0,0 +1,24 @@ + + + +JsonCpp - JSON data format manipulation library + + + + + + + + + + + +
+ + JsonCpp project page + + + JsonCpp home page +
+ +
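+<!-- This header, together with footer.html above, is the pair that the doxyfile's
+     HTML_HEADER and HTML_FOOTER tags point at, so every generated page is wrapped
+     in the project/SourceForge navigation shown here. -->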
diff --git a/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox b/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox new file mode 100644 index 0000000..97cc108 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/jsoncpp.dox @@ -0,0 +1,126 @@ +/** +\mainpage +\section _intro Introduction + +JSON (JavaScript Object Notation) + is a lightweight data-interchange format. +It can represent integer, real number, string, an ordered sequence of value, and +a collection of name/value pairs. + +Here is an example of JSON data: +\verbatim +// Configuration options +{ + // Default encoding for text + "encoding" : "UTF-8", + + // Plug-ins loaded at start-up + "plug-ins" : [ + "python", + "c++", + "ruby" + ], + + // Tab indent size + "indent" : { "length" : 3, "use_space": true } +} +\endverbatim + +\section _features Features +- read and write JSON document +- attach C and C++ style comments to element during parsing +- rewrite JSON document preserving original comments + +Notes: Comments used to be supported in JSON but where removed for +portability (C like comments are not supported in Python). Since +comments are useful in configuration/input file, this feature was +preserved. + +\section _example Code example + +\code +Json::Value root; // will contains the root value after parsing. +Json::Reader reader; +bool parsingSuccessful = reader.parse( config_doc, root ); +if ( !parsingSuccessful ) +{ + // report to the user the failure and their locations in the document. + std::cout << "Failed to parse configuration\n" + << reader.getFormattedErrorMessages(); + return; +} + +// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no +// such member. +std::string encoding = root.get("encoding", "UTF-8" ).asString(); +// Get the value of the member of root named 'encoding', return a 'null' value if +// there is no such member. +const Json::Value plugins = root["plug-ins"]; +for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements. + loadPlugIn( plugins[index].asString() ); + +setIndentLength( root["indent"].get("length", 3).asInt() ); +setIndentUseSpace( root["indent"].get("use_space", true).asBool() ); + +// ... +// At application shutdown to make the new configuration document: +// Since Json::Value has implicit constructor for all value types, it is not +// necessary to explicitly construct the Json::Value object: +root["encoding"] = getCurrentEncoding(); +root["indent"]["length"] = getCurrentIndentLength(); +root["indent"]["use_space"] = getCurrentIndentUseSpace(); + +Json::StyledWriter writer; +// Make a new JSON document for the configuration. Preserve original comments. +std::string outputConfig = writer.write( root ); + +// You can also use streams. This will put the contents of any JSON +// stream at a particular sub-value, if you'd like. +std::cin >> root["subtree"]; + +// And you can write to a stream, using the StyledWriter automatically. +std::cout << root; +\endcode + +\section _pbuild Build instructions +The build instructions are located in the file +README.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest README.txt + +\section _pdownload Download +The sources can be downloaded from +SourceForge download page. + +The latest version of the source is available in the project's subversion repository: + +http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/ + +To checkout the source, see the following +instructions. + +\section _news What's New? 
+The description of latest changes can be found in +NEWS.txt in the top-directory of the project. + +Permanent link to the latest revision of the file in subversion: +latest NEWS.txt + +\section _plinks Project links +- json-cpp home +- json-cpp sourceforge project + +\section _rlinks Related links +- JSON Specification and alternate language implementations. +- YAML A data format designed for human readability. +- UTF-8 and Unicode FAQ. + +\section _license License +See file LICENSE in the top-directory of the project. + +Basically JsonCpp is licensed under MIT license, or public domain if desired +and recognized in your jurisdiction. + +\author Baptiste Lepilleur +*/ diff --git a/tags/jsoncpp/0.6.0-rc2/doc/readme.txt b/tags/jsoncpp/0.6.0-rc2/doc/readme.txt new file mode 100644 index 0000000..0e42cdf --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/readme.txt @@ -0,0 +1 @@ +The documentation is generated using doxygen (http://www.doxygen.org). diff --git a/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox b/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox new file mode 100644 index 0000000..e6fc17a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doc/roadmap.dox @@ -0,0 +1,37 @@ +/*! \page roadmap JsonCpp roadmap + \section ms_release Makes JsonCpp ready for release + - Build system clean-up: + - Fix build on Windows (shared-library build is broken) + - Add enable/disable flag for static and shared library build + - Enhance help + - Platform portability check: (Notes: was ok on last check) + - linux/gcc, + - solaris/cc, + - windows/msvc678, + - aix/vacpp + - Add JsonCpp version to header as numeric for use in preprocessor test + - Remove buggy experimental hash stuff + \section ms_strict Adds a strict mode to reader/parser + Strict JSON support as specific in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627). + - Enforce only object or array as root element + - Disable comment support + - Get jsonchecker failing tests to pass in strict mode + \section ms_writer Writter control + Provides more control to determine how specific items are serialized when JSON allow choice: + - Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u". + - Optionally allow escaping of "/" using "\/". + \section ms_separation Expose json reader/writer API that do not impose using Json::Value. + Some typical use-case involve an application specific structure to/from a JSON document. + - Event base parser to allow unserializing a Json document directly in datastructure instead of + using the intermediate Json::Value. + - Stream based parser to serialized a Json document without using Json::Value as input. + - Performance oriented parser/writer: + - Provides an event based parser. Should allow pulling & skipping events for ease of use. + - Provides a JSON document builder: fast only. + \section ms_perfo Performance tuning + - Provides support for static property name definition avoiding allocation + - Static property dictionnary can be provided to JSON reader + - Performance scenario & benchmarking + \section testing Testing + - Adds more tests for unicode parsing (e.g. including surrogate and error detection). +*/ diff --git a/tags/jsoncpp/0.6.0-rc2/doxybuild.py b/tags/jsoncpp/0.6.0-rc2/doxybuild.py new file mode 100644 index 0000000..03ad68d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/doxybuild.py @@ -0,0 +1,169 @@ +"""Script to generate doxygen documentation. 
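+
+In outline: the %KEY% placeholders in doc/doxyfile.in are replaced with concrete
+values (version, output paths, dot/HTML-help switches), doxygen is run on the
+resulting doc/doxyfile, and the HTML output can optionally be packaged as a
+tarball under dist/.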
+""" + +import re +import os +import os.path +import sys +import shutil +from devtools import tarball + +def find_program(*filenames): + """find a program in folders path_lst, and sets env[var] + @param filenames: a list of possible names of the program to search for + @return: the full path of the filename if found, or '' if filename could not be found +""" + paths = os.environ.get('PATH', '').split(os.pathsep) + suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or '' + for filename in filenames: + for name in [filename+ext for ext in suffixes.split()]: + for directory in paths: + full_path = os.path.join(directory, name) + if os.path.isfile(full_path): + return full_path + return '' + +def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + print "Can't read source file %s"%sourcefile + raise + for (k,v) in dict.items(): + v = v.replace('\\','\\\\') + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + print "Can't write target file %s"%targetfile + raise + +def run_doxygen(doxygen_path, config_file, working_dir, is_silent): + config_file = os.path.abspath( config_file ) + doxygen_path = doxygen_path + old_cwd = os.getcwd() + try: + os.chdir( working_dir ) + cmd = [doxygen_path, config_file] + print 'Running:', ' '.join( cmd ) + try: + import subprocess + except: + if os.system( ' '.join( cmd ) ) != 0: + print 'Documentation generation failed' + return False + else: + if is_silent: + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + else: + process = subprocess.Popen( cmd ) + stdout, _ = process.communicate() + if process.returncode: + print 'Documentation generation failed:' + print stdout + return False + return True + finally: + os.chdir( old_cwd ) + +def build_doc( options, make_release=False ): + if make_release: + options.make_tarball = True + options.with_dot = True + options.with_html_help = True + options.with_uml_look = True + options.open = False + options.silent = True + + version = open('version','rt').read().strip() + output_dir = 'dist/doxygen' # relative to doc/doxyfile location. + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + top_dir = os.path.abspath( '.' 
) + html_output_dirname = 'jsoncpp-api-html-' + version + tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' ) + warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' ) + html_output_path = os.path.join( output_dir, html_output_dirname ) + def yesno( bool ): + return bool and 'YES' or 'NO' + subst_keys = { + '%JSONCPP_VERSION%': version, + '%DOC_TOPDIR%': '', + '%TOPDIR%': top_dir, + '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ), + '%HAVE_DOT%': yesno(options.with_dot), + '%DOT_PATH%': os.path.split(options.dot_path)[0], + '%HTML_HELP%': yesno(options.with_html_help), + '%UML_LOOK%': yesno(options.with_uml_look), + '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path ) + } + + if os.path.isdir( output_dir ): + print 'Deleting directory:', output_dir + shutil.rmtree( output_dir ) + if not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + + do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys ) + ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent ) + if not options.silent: + print open(warning_log_path, 'rb').read() + index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html')) + print 'Generated documentation can be found in:' + print index_path + if options.open: + import webbrowser + webbrowser.open( 'file://' + index_path ) + if options.make_tarball: + print 'Generating doc tarball to', tarball_path + tarball_sources = [ + output_dir, + 'README.txt', + 'LICENSE', + 'NEWS.txt', + 'version' + ] + tarball_basedir = os.path.join( output_dir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + return tarball_path, html_output_dirname + +def main(): + usage = """%prog + Generates doxygen documentation in build/doxygen. + Optionaly makes a tarball of the documentation to dist/. + + Must be started in the project top directory. + """ + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False, + help="""Enable usage of DOT to generate collaboration diagram""") + parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), + help="""Path to Doxygen tool. 
[Default: %default]""") + parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, + help="""Enable generation of Microsoft HTML HELP""") + parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, + help="""Generates DOT graph without UML look [Default: False]""") + parser.add_option('--open', dest="open", action='store_true', default=False, + help="""Open the HTML index in the web browser after generation""") + parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False, + help="""Generates a tarball of the documentation in dist/ directory""") + parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False, + help="""Hides doxygen output""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + build_doc( options ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h b/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h new file mode 100644 index 0000000..02328d1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/autolink.h @@ -0,0 +1,24 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_AUTOLINK_H_INCLUDED +# define JSON_AUTOLINK_H_INCLUDED + +# include "config.h" + +# ifdef JSON_IN_CPPTL +# include +# endif + +# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL) +# define CPPTL_AUTOLINK_NAME "json" +# undef CPPTL_AUTOLINK_DLL +# ifdef JSON_DLL +# define CPPTL_AUTOLINK_DLL +# endif +# include "autolink.h" +# endif + +#endif // JSON_AUTOLINK_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/config.h b/tags/jsoncpp/0.6.0-rc2/include/json/config.h new file mode 100644 index 0000000..7609d45 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/config.h @@ -0,0 +1,96 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +# define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 +/// If defined, indicates that Json specific container should be used +/// (hash table & simple deque container with customizable allocator). +/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332 +//# define JSON_VALUE_USE_INTERNAL_MAP 1 +/// Force usage of standard new/malloc based allocator instead of memory pool based allocator. +/// The memory pools allocator used optimization (initializing Value and ValueInternalLink +/// as if it was a POD) that may cause some validation tool to report errors. +/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. +//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 + +/// If defined, indicates that Json use exception to report invalid type manipulation +/// instead of C assert macro. 
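+/// (It is defined by default just below; undefining it falls back to the C assert
+/// macro described above.)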
+# define JSON_USE_EXCEPTION 1 + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. +// #define JSON_IS_AMALGAMATION + + +# ifdef JSON_IN_CPPTL +# include +# ifndef JSON_USE_CPPTL +# define JSON_USE_CPPTL 1 +# endif +# endif + +# ifdef JSON_IN_CPPTL +# define JSON_API CPPTL_API +# elif defined(JSON_DLL_BUILD) +# define JSON_API __declspec(dllexport) +# elif defined(JSON_DLL) +# define JSON_API __declspec(dllimport) +# else +# define JSON_API +# endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6 +// Microsoft Visual Studio 6 only support conversion from __int64 to double +// (no conversion from unsigned __int64). +#define JSON_USE_INT64_DOUBLE_CONVERSION 1 +#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 + +#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 +/// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +#endif + +#if !defined(JSONCPP_DEPRECATED) +# define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { + typedef int Int; + typedef unsigned int UInt; +# if defined(JSON_NO_INT64) + typedef int LargestInt; + typedef unsigned int LargestUInt; +# undef JSON_HAS_INT64 +# else // if defined(JSON_NO_INT64) + // For Microsoft Visual use specific types as long long is not supported +# if defined(_MSC_VER) // Microsoft Visual Studio + typedef __int64 Int64; + typedef unsigned __int64 UInt64; +# else // if defined(_MSC_VER) // Other platforms, use long long + typedef long long int Int64; + typedef unsigned long long int UInt64; +# endif // if defined(_MSC_VER) + typedef Int64 LargestInt; + typedef UInt64 LargestUInt; +# define JSON_HAS_INT64 +# endif // if defined(JSON_NO_INT64) +} // end namespace Json + + +#endif // JSON_CONFIG_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/features.h b/tags/jsoncpp/0.6.0-rc2/include/json/features.h new file mode 100644 index 0000000..4353278 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/features.h @@ -0,0 +1,49 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +# define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + + /** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ + class JSON_API Features + { + public: + /** \brief A configuration that allows all features and assumes all strings are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. 
+ * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c false. + bool strictRoot_; + }; + +} // namespace Json + +#endif // CPPTL_JSON_FEATURES_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h b/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h new file mode 100644 index 0000000..ab863da --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/forwards.h @@ -0,0 +1,44 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +# define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "config.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + + // writer.h + class FastWriter; + class StyledWriter; + + // reader.h + class Reader; + + // features.h + class Features; + + // value.h + typedef unsigned int ArrayIndex; + class StaticString; + class Path; + class PathArgument; + class Value; + class ValueIteratorBase; + class ValueIterator; + class ValueConstIterator; +#ifdef JSON_VALUE_USE_INTERNAL_MAP + class ValueMapAllocator; + class ValueInternalLink; + class ValueInternalArray; + class ValueInternalMap; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + +} // namespace Json + + +#endif // JSON_FORWARDS_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/json.h b/tags/jsoncpp/0.6.0-rc2/include/json/json.h new file mode 100644 index 0000000..da5fc96 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/json.h @@ -0,0 +1,15 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_JSON_H_INCLUDED +# define JSON_JSON_H_INCLUDED + +# include "autolink.h" +# include "value.h" +# include "reader.h" +# include "writer.h" +# include "features.h" + +#endif // JSON_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/reader.h b/tags/jsoncpp/0.6.0-rc2/include/json/reader.h new file mode 100644 index 0000000..0a324df --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/reader.h @@ -0,0 +1,214 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_READER_H_INCLUDED +# define CPPTL_JSON_READER_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "features.h" +# include "value.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +# include +# include +# include +# include + +namespace Json { + + /** \brief Unserialize a JSON document into a Value. + * + */ + class JSON_API Reader + { + public: + typedef char Char; + typedef const Char *Location; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader( const Features &features ); + + /** \brief Read a Value from a JSON document. 
+ * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const std::string &document, + Value &root, + bool collectComments = true ); + + /** \brief Read a Value from a JSON document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read. + \ Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them back during + * serialization, \c false to discard comments. + * This parameter is ignored if Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an error occurred. + */ + bool parse( const char *beginDoc, const char *endDoc, + Value &root, + bool collectComments = true ); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse( std::istream &is, + Value &root, + bool collectComments = true ); + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed document. + * \return Formatted error message with the list of errors with their location in + * the parsed document. An empty string is returned if no error occurred + * during parsing. 
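+     * A minimal calling sketch (illustrative only: the 'document' string and the use of
+     * std::cerr are assumptions, not part of this header):
+     * \code
+     *   Json::Reader reader( Json::Features::strictMode() );
+     *   Json::Value root;
+     *   if ( !reader.parse( document, root ) )
+     *     std::cerr << reader.getFormattedErrorMessages();
+     * \endcode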
+ */ + std::string getFormattedErrorMessages() const; + + private: + enum TokenType + { + tokenEndOfStream = 0, + tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token + { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo + { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool expectToken( TokenType type, Token &token, const char *message ); + bool readToken( Token &token ); + void skipSpaces(); + bool match( Location pattern, + int patternLength ); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + void readNumber(); + bool readValue(); + bool readObject( Token &token ); + bool readArray( Token &token ); + bool decodeNumber( Token &token ); + bool decodeString( Token &token ); + bool decodeString( Token &token, std::string &decoded ); + bool decodeDouble( Token &token ); + bool decodeUnicodeCodePoint( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool decodeUnicodeEscapeSequence( Token &token, + Location ¤t, + Location end, + unsigned int &unicode ); + bool addError( const std::string &message, + Token &token, + Location extra = 0 ); + bool recoverFromError( TokenType skipUntilToken ); + bool addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ); + void skipUntilSpace(); + Value ¤tValue(); + Char getNextChar(); + void getLocationLineAndColumn( Location location, + int &line, + int &column ) const; + std::string getLocationLineAndColumn( Location location ) const; + void addComment( Location begin, + Location end, + CommentPlacement placement ); + void skipCommentTokens( Token &token ); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value *lastValue_; + std::string commentsBefore_; + Features features_; + bool collectComments_; + }; + + /** \brief Read from 'sin' into 'root'. + + Always keep comments from the input JSON. + + This can be used to read a file into a particular sub-object. + For example: + \code + Json::Value root; + cin >> root["dir"]["file"]; + cout << root; + \endcode + Result: + \verbatim + { + "dir": { + "file": { + // The input stream JSON would be nested here. + } + } + } + \endverbatim + \throw std::exception on parse error. + \see Json::operator<<() + */ + std::istream& operator>>( std::istream&, Value& ); + +} // namespace Json + +#endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/value.h b/tags/jsoncpp/0.6.0-rc2/include/json/value.h new file mode 100644 index 0000000..32e3455 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/value.h @@ -0,0 +1,1103 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_H_INCLUDED +# define CPPTL_JSON_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "forwards.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +# include +# include + +# ifndef JSON_USE_CPPTL_SMALLMAP +# include +# else +# include +# endif +# ifdef JSON_USE_CPPTL +# include +# endif + +/** \brief JSON (JavaScript Object Notation). + */ +namespace Json { + + /** \brief Type of the value held by a Value object. + */ + enum ValueType + { + nullValue = 0, ///< 'null' value + intValue, ///< signed integer value + uintValue, ///< unsigned integer value + realValue, ///< double value + stringValue, ///< UTF-8 string value + booleanValue, ///< bool value + arrayValue, ///< array value (ordered list) + objectValue ///< object value (collection of name/value pairs). + }; + + enum CommentPlacement + { + commentBefore = 0, ///< a comment placed on the line before a value + commentAfterOnSameLine, ///< a comment just after a value on the same line + commentAfter, ///< a comment on the line after a value (only make sense for root value) + numberOfCommentPlacement + }; + +//# ifdef JSON_USE_CPPTL +// typedef CppTL::AnyEnumerator EnumMemberNames; +// typedef CppTL::AnyEnumerator EnumValues; +//# endif + + /** \brief Lightweight wrapper to tag static string. + * + * Value constructor and objectValue member assignement takes advantage of the + * StaticString and avoid the cost of string duplication when storing the + * string or the member name. + * + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + class JSON_API StaticString + { + public: + explicit StaticString( const char *czstring ) + : str_( czstring ) + { + } + + operator const char *() const + { + return str_; + } + + const char *c_str() const + { + return str_; + } + + private: + const char *str_; + }; + + /** \brief Represents a JSON value. + * + * This class is a discriminated union wrapper that can represents a: + * - signed integer [range: Value::minInt - Value::maxInt] + * - unsigned integer (range: 0 - Value::maxUInt) + * - double + * - UTF-8 string + * - boolean + * - 'null' + * - an ordered list of Value + * - collection of name/value pairs (javascript object) + * + * The type of the held value is represented by a #ValueType and + * can be obtained using type(). + * + * values of an #objectValue or #arrayValue can be accessed using operator[]() methods. + * Non const methods will automatically create the a #nullValue element + * if it does not exist. + * The sequence of an #arrayValue will be automatically resize and initialized + * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue. + * + * The get() methods can be used to obtanis default value in the case the required element + * does not exist. + * + * It is possible to iterate over the list of a #objectValue values using + * the getMemberNames() method. 
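+    * A brief usage sketch (illustrative only: the member names and values below are
+    * invented for the example and are not defined by this header):
+    * \code
+    *   Json::Value config( Json::objectValue );
+    *   config["name"] = "demo";
+    *   config["sizes"][0u] = 640;   // 0u selects the ArrayIndex overload
+    *   int width = config["sizes"].get( 0u, 0 ).asInt();
+    *   Json::Value::Members members = config.getMemberNames();
+    * \endcode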
+ */ + class JSON_API Value + { + friend class ValueIteratorBase; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + friend class ValueInternalLink; + friend class ValueInternalMap; +# endif + public: + typedef std::vector Members; + typedef ValueIterator iterator; + typedef ValueConstIterator const_iterator; + typedef Json::UInt UInt; + typedef Json::Int Int; +# if defined(JSON_HAS_INT64) + typedef Json::UInt64 UInt64; + typedef Json::Int64 Int64; +#endif // defined(JSON_HAS_INT64) + typedef Json::LargestInt LargestInt; + typedef Json::LargestUInt LargestUInt; + typedef Json::ArrayIndex ArrayIndex; + + static const Value null; + /// Minimum signed integer value that can be stored in a Json::Value. + static const LargestInt minLargestInt; + /// Maximum signed integer value that can be stored in a Json::Value. + static const LargestInt maxLargestInt; + /// Maximum unsigned integer value that can be stored in a Json::Value. + static const LargestUInt maxLargestUInt; + + /// Minimum signed int value that can be stored in a Json::Value. + static const Int minInt; + /// Maximum signed int value that can be stored in a Json::Value. + static const Int maxInt; + /// Maximum unsigned int value that can be stored in a Json::Value. + static const UInt maxUInt; + + /// Minimum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 minInt64; + /// Maximum signed 64 bits int value that can be stored in a Json::Value. + static const Int64 maxInt64; + /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. + static const UInt64 maxUInt64; + + private: +#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION +# ifndef JSON_VALUE_USE_INTERNAL_MAP + class CZString + { + public: + enum DuplicationPolicy + { + noDuplication = 0, + duplicate, + duplicateOnCopy + }; + CZString( ArrayIndex index ); + CZString( const char *cstr, DuplicationPolicy allocate ); + CZString( const CZString &other ); + ~CZString(); + CZString &operator =( const CZString &other ); + bool operator<( const CZString &other ) const; + bool operator==( const CZString &other ) const; + ArrayIndex index() const; + const char *c_str() const; + bool isStaticString() const; + private: + void swap( CZString &other ); + const char *cstr_; + ArrayIndex index_; + }; + + public: +# ifndef JSON_USE_CPPTL_SMALLMAP + typedef std::map ObjectValues; +# else + typedef CppTL::SmallMap ObjectValues; +# endif // ifndef JSON_USE_CPPTL_SMALLMAP +# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP +#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + public: + /** \brief Create a default Value of the given type. + + This is a very useful constructor. + To create an empty array, pass arrayValue. + To create an empty object, pass objectValue. + Another Value can then be set to this one by assignment. + This is useful since clear() and resize() will not alter types. + + Examples: + \code + Json::Value null_value; // null + Json::Value arr_value(Json::arrayValue); // [] + Json::Value obj_value(Json::objectValue); // {} + \endcode + */ + Value( ValueType type = nullValue ); + Value( Int value ); + Value( UInt value ); +#if defined(JSON_HAS_INT64) + Value( Int64 value ); + Value( UInt64 value ); +#endif // if defined(JSON_HAS_INT64) + Value( double value ); + Value( const char *value ); + Value( const char *beginValue, const char *endValue ); + /** \brief Constructs a value from a static string. + + * Like other value string constructor but do not duplicate the string for + * internal storage. 
The given string must remain alive after the call to this + * constructor. + * Example of usage: + * \code + * Json::Value aValue( StaticString("some text") ); + * \endcode + */ + Value( const StaticString &value ); + Value( const std::string &value ); +# ifdef JSON_USE_CPPTL + Value( const CppTL::ConstString &value ); +# endif + Value( bool value ); + Value( const Value &other ); + ~Value(); + + Value &operator=( const Value &other ); + /// Swap values. + /// \note Currently, comments are intentionally not swapped, for + /// both logic and efficiency. + void swap( Value &other ); + + ValueType type() const; + + bool operator <( const Value &other ) const; + bool operator <=( const Value &other ) const; + bool operator >=( const Value &other ) const; + bool operator >( const Value &other ) const; + + bool operator ==( const Value &other ) const; + bool operator !=( const Value &other ) const; + + int compare( const Value &other ) const; + + const char *asCString() const; + std::string asString() const; +# ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +# endif + Int asInt() const; + UInt asUInt() const; + Int64 asInt64() const; + UInt64 asUInt64() const; + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isUInt() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo( ValueType other ) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize( ArrayIndex size ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( ArrayIndex index ); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value &operator[]( int index ); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value &operator[]( ArrayIndex index ) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) 
+ const Value &operator[]( int index ) const; + + /// If the array contains at least index+1 elements, returns the element value, + /// otherwise returns defaultValue. + Value get( ArrayIndex index, + const Value &defaultValue ) const; + /// Return true if index < size(). + bool isValidIndex( ArrayIndex index ) const; + /// \brief Append value to array at the end. + /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value &append( const Value &value ); + + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const char *key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const char *key ) const; + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const std::string &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const std::string &key ) const; + /** \brief Access an object value by name, create a null member if it does not exist. + + * If the object as no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value &operator[]( const StaticString &key ); +# ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value &operator[]( const CppTL::ConstString &key ); + /// Access an object value by name, returns null if there is no member with that name. + const Value &operator[]( const CppTL::ConstString &key ) const; +# endif + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const char *key, + const Value &defaultValue ) const; + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const std::string &key, + const Value &defaultValue ) const; +# ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + Value get( const CppTL::ConstString &key, + const Value &defaultValue ) const; +# endif + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + Value removeMember( const char* key ); + /// Same as removeMember(const char*) + Value removeMember( const std::string &key ); + + /// Return true if the object has a member named key. + bool isMember( const char *key ) const; + /// Return true if the object has a member named key. + bool isMember( const std::string &key ) const; +# ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember( const CppTL::ConstString &key ) const; +# endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + +//# ifdef JSON_USE_CPPTL +// EnumMemberNames enumMemberNames() const; +// EnumValues enumValues() const; +//# endif + + /// Comments must be //... or /* ... */ + void setComment( const char *comment, + CommentPlacement placement ); + /// Comments must be //... or /* ... 
*/ + void setComment( const std::string &comment, + CommentPlacement placement ); + bool hasComment( CommentPlacement placement ) const; + /// Include delimiters and embedded newlines. + std::string getComment( CommentPlacement placement ) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + private: + Value &resolveReference( const char *key, + bool isStatic ); + +# ifdef JSON_VALUE_USE_INTERNAL_MAP + inline bool isItemAvailable() const + { + return itemIsUsed_ == 0; + } + + inline void setItemUsed( bool isUsed = true ) + { + itemIsUsed_ = isUsed ? 1 : 0; + } + + inline bool isMemberNameStatic() const + { + return memberNameIsStatic_ == 0; + } + + inline void setMemberNameIsStatic( bool isStatic ) + { + memberNameIsStatic_ = isStatic ? 1 : 0; + } +# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP + + private: + struct CommentInfo + { + CommentInfo(); + ~CommentInfo(); + + void setComment( const char *text ); + + char *comment_; + }; + + //struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder + { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char *string_; +# ifdef JSON_VALUE_USE_INTERNAL_MAP + ValueInternalArray *array_; + ValueInternalMap *map_; +#else + ObjectValues *map_; +# endif + } value_; + ValueType type_ : 8; + int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. +# ifdef JSON_VALUE_USE_INTERNAL_MAP + unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container. + int memberNameIsStatic_ : 1; // used by the ValueInternalMap container. +# endif + CommentInfo *comments_; + }; + + + /** \brief Experimental and untested: represents an element of the "path" to access a node. + */ + class PathArgument + { + public: + friend class Path; + + PathArgument(); + PathArgument( ArrayIndex index ); + PathArgument( const char *key ); + PathArgument( const std::string &key ); + + private: + enum Kind + { + kindNone = 0, + kindIndex, + kindKey + }; + std::string key_; + ArrayIndex index_; + Kind kind_; + }; + + /** \brief Experimental and untested: represents a "path" to access a node. + * + * Syntax: + * - "." => root node + * - ".[n]" => elements at index 'n' of root node (an array value) + * - ".name" => member named 'name' of root node (an object value) + * - ".name1.name2.name3" + * - ".[0][1][2].name1[3]" + * - ".%" => member name is provided as parameter + * - ".[%]" => index is provied as parameter + */ + class Path + { + public: + Path( const std::string &path, + const PathArgument &a1 = PathArgument(), + const PathArgument &a2 = PathArgument(), + const PathArgument &a3 = PathArgument(), + const PathArgument &a4 = PathArgument(), + const PathArgument &a5 = PathArgument() ); + + const Value &resolve( const Value &root ) const; + Value resolve( const Value &root, + const Value &defaultValue ) const; + /// Creates the "path" to access the specified node and returns a reference on the node. 
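+      /// A hypothetical sketch of the intended use (the path string below is invented for
+      /// illustration; note this class is documented as experimental and untested):
+      /// \code
+      ///   Json::Path path( ".settings.canvas[0].width" );
+      ///   path.make( root ) = 640;   // builds the intermediate nodes as needed
+      /// \endcode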
+ Value &make( Value &root ) const; + + private: + typedef std::vector InArgs; + typedef std::vector Args; + + void makePath( const std::string &path, + const InArgs &in ); + void addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ); + void invalidPath( const std::string &path, + int location ); + + Args args_; + }; + + + +#ifdef JSON_VALUE_USE_INTERNAL_MAP + /** \brief Allocator to customize Value internal map. + * Below is an example of a simple implementation (default implementation actually + * use memory pool for speed). + * \code + class DefaultValueMapAllocator : public ValueMapAllocator + { + public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } + }; + * \endcode + */ + class JSON_API ValueMapAllocator + { + public: + virtual ~ValueMapAllocator(); + virtual ValueInternalMap *newMap() = 0; + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0; + virtual void destructMap( ValueInternalMap *map ) = 0; + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0; + virtual void releaseMapBuckets( ValueInternalLink *links ) = 0; + virtual ValueInternalLink *allocateMapLink() = 0; + virtual void releaseMapLink( ValueInternalLink *link ) = 0; + }; + + /** \brief ValueInternalMap hash-map bucket chain link (for internal use only). + * \internal previous_ & next_ allows for bidirectional traversal. + */ + class JSON_API ValueInternalLink + { + public: + enum { itemPerLink = 6 }; // sizeof(ValueInternalLink) = 128 on 32 bits architecture. + enum InternalFlags { + flagAvailable = 0, + flagUsed = 1 + }; + + ValueInternalLink(); + + ~ValueInternalLink(); + + Value items_[itemPerLink]; + char *keys_[itemPerLink]; + ValueInternalLink *previous_; + ValueInternalLink *next_; + }; + + + /** \brief A linked page based hash-table implementation used internally by Value. + * \internal ValueInternalMap is a tradional bucket based hash-table, with a linked + * list in each bucket to handle collision. There is an addional twist in that + * each node of the collision linked list is a page containing a fixed amount of + * value. This provides a better compromise between memory usage and speed. + * + * Each bucket is made up of a chained list of ValueInternalLink. The last + * link of a given bucket can be found in the 'previous_' field of the following bucket. + * The last link of the last bucket is stored in tailLink_ as it has no following bucket. + * Only the last link of a bucket may contains 'available' item. The last link always + * contains at least one element unless is it the bucket one very first link. 
+ */ + class JSON_API ValueInternalMap + { + friend class ValueIteratorBase; + friend class Value; + public: + typedef unsigned int HashKey; + typedef unsigned int BucketIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState + { + IteratorState() + : map_(0) + , link_(0) + , itemIndex_(0) + , bucketIndex_(0) + { + } + ValueInternalMap *map_; + ValueInternalLink *link_; + BucketIndex itemIndex_; + BucketIndex bucketIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalMap(); + ValueInternalMap( const ValueInternalMap &other ); + ValueInternalMap &operator =( const ValueInternalMap &other ); + ~ValueInternalMap(); + + void swap( ValueInternalMap &other ); + + BucketIndex size() const; + + void clear(); + + bool reserveDelta( BucketIndex growth ); + + bool reserve( BucketIndex newItemCount ); + + const Value *find( const char *key ) const; + + Value *find( const char *key ); + + Value &resolveReference( const char *key, + bool isStatic ); + + void remove( const char *key ); + + void doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ); + + ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex ); + + Value &setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ); + + Value &unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ); + + HashKey hash( const char *key ) const; + + int compare( const ValueInternalMap &other ) const; + + private: + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void incrementBucket( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static const char *key( const IteratorState &iterator ); + static const char *key( const IteratorState &iterator, bool &isStatic ); + static Value &value( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + + private: + ValueInternalLink *buckets_; + ValueInternalLink *tailLink_; + BucketIndex bucketsSize_; + BucketIndex itemCount_; + }; + + /** \brief A simplified deque implementation used internally by Value. + * \internal + * It is based on a list of fixed "page", each page contains a fixed number of items. + * Instead of using a linked-list, a array of pointer is used for fast item look-up. + * Look-up for an element is as follow: + * - compute page index: pageIndex = itemIndex / itemsPerPage + * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage] + * + * Insertion is amortized constant time (only the array containing the index of pointers + * need to be reallocated when items are appended). + */ + class JSON_API ValueInternalArray + { + friend class Value; + friend class ValueIteratorBase; + public: + enum { itemsPerPage = 8 }; // should be a power of 2 for fast divide and modulo. 
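+      // Example of the look-up rule described above: with itemsPerPage == 8,
+      // item 11 is found at pages_[11 / 8][11 % 8], i.e. pages_[1][3].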
+ typedef Value::ArrayIndex ArrayIndex; + typedef unsigned int PageIndex; + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + struct IteratorState // Must be a POD + { + IteratorState() + : array_(0) + , currentPageIndex_(0) + , currentItemIndex_(0) + { + } + ValueInternalArray *array_; + Value **currentPageIndex_; + unsigned int currentItemIndex_; + }; +# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + + ValueInternalArray(); + ValueInternalArray( const ValueInternalArray &other ); + ValueInternalArray &operator =( const ValueInternalArray &other ); + ~ValueInternalArray(); + void swap( ValueInternalArray &other ); + + void clear(); + void resize( ArrayIndex newSize ); + + Value &resolveReference( ArrayIndex index ); + + Value *find( ArrayIndex index ) const; + + ArrayIndex size() const; + + int compare( const ValueInternalArray &other ) const; + + private: + static bool equals( const IteratorState &x, const IteratorState &other ); + static void increment( IteratorState &iterator ); + static void decrement( IteratorState &iterator ); + static Value &dereference( const IteratorState &iterator ); + static Value &unsafeDereference( const IteratorState &iterator ); + static int distance( const IteratorState &x, const IteratorState &y ); + static ArrayIndex indexOf( const IteratorState &iterator ); + void makeBeginIterator( IteratorState &it ) const; + void makeEndIterator( IteratorState &it ) const; + void makeIterator( IteratorState &it, ArrayIndex index ) const; + + void makeIndexValid( ArrayIndex index ); + + Value **pages_; + ArrayIndex size_; + PageIndex pageCount_; + }; + + /** \brief Experimental: do not use. Allocator to customize Value internal array. + * Below is an example of a simple implementation (actual implementation use + * memory pool). + \code +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destruct( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + \endcode + */ + class JSON_API ValueArrayAllocator + { + public: + virtual ~ValueArrayAllocator(); + virtual ValueInternalArray *newArray() = 0; + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0; + virtual void destructArray( ValueInternalArray *array ) = 0; + /** \brief Reallocate array page index. + * Reallocates an array of pointer on each page. 
+ * \param indexes [input] pointer on the current index. May be \c NULL. + * [output] pointer on the new index of at least + * \a minNewIndexCount pages. + * \param indexCount [input] current number of pages in the index. + * [output] number of page the reallocated index can handle. + * \b MUST be >= \a minNewIndexCount. + * \param minNewIndexCount Minimum number of page the new index must be able to + * handle. + */ + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) = 0; + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) = 0; + virtual Value *allocateArrayPage() = 0; + virtual void releaseArrayPage( Value *value ) = 0; + }; +#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP + + + /** \brief base class for Value iterators. + * + */ + class ValueIteratorBase + { + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef ValueIteratorBase SelfType; + + ValueIteratorBase(); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIteratorBase( const Value::ObjectValues::iterator ¤t ); +#else + ValueIteratorBase( const ValueInternalArray::IteratorState &state ); + ValueIteratorBase( const ValueInternalMap::IteratorState &state ); +#endif + + bool operator ==( const SelfType &other ) const + { + return isEqual( other ); + } + + bool operator !=( const SelfType &other ) const + { + return !isEqual( other ); + } + + difference_type operator -( const SelfType &other ) const + { + return computeDistance( other ); + } + + /// Return either the index or the member name of the referenced value as a Value. + Value key() const; + + /// Return the index of the referenced Value. -1 if it is not an arrayValue. + UInt index() const; + + /// Return the member name of the referenced Value. "" if it is not an objectValue. + const char *memberName() const; + + protected: + Value &deref() const; + + void increment(); + + void decrement(); + + difference_type computeDistance( const SelfType &other ) const; + + bool isEqual( const SelfType &other ) const; + + void copy( const SelfType &other ); + + private: +#ifndef JSON_VALUE_USE_INTERNAL_MAP + Value::ObjectValues::iterator current_; + // Indicates that iterator is for a null value. + bool isNull_; +#else + union + { + ValueInternalArray::IteratorState array_; + ValueInternalMap::IteratorState map_; + } iterator_; + bool isArray_; +#endif + }; + + /** \brief const iterator for object and array value. + * + */ + class ValueConstIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef const Value &reference; + typedef const Value *pointer; + typedef ValueConstIterator SelfType; + + ValueConstIterator(); + private: + /*! \internal Use by Value to create an iterator. 
+ */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueConstIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueConstIterator( const ValueInternalArray::IteratorState &state ); + ValueConstIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + SelfType &operator =( const ValueIteratorBase &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + + /** \brief Iterator for object and array value. + */ + class ValueIterator : public ValueIteratorBase + { + friend class Value; + public: + typedef unsigned int size_t; + typedef int difference_type; + typedef Value &reference; + typedef Value *pointer; + typedef ValueIterator SelfType; + + ValueIterator(); + ValueIterator( const ValueConstIterator &other ); + ValueIterator( const ValueIterator &other ); + private: + /*! \internal Use by Value to create an iterator. + */ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + explicit ValueIterator( const Value::ObjectValues::iterator ¤t ); +#else + ValueIterator( const ValueInternalArray::IteratorState &state ); + ValueIterator( const ValueInternalMap::IteratorState &state ); +#endif + public: + + SelfType &operator =( const SelfType &other ); + + SelfType operator++( int ) + { + SelfType temp( *this ); + ++*this; + return temp; + } + + SelfType operator--( int ) + { + SelfType temp( *this ); + --*this; + return temp; + } + + SelfType &operator--() + { + decrement(); + return *this; + } + + SelfType &operator++() + { + increment(); + return *this; + } + + reference operator *() const + { + return deref(); + } + }; + + +} // namespace Json + + +#endif // CPPTL_JSON_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/include/json/writer.h b/tags/jsoncpp/0.6.0-rc2/include/json/writer.h new file mode 100644 index 0000000..4789363 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/include/json/writer.h @@ -0,0 +1,185 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_WRITER_H_INCLUDED +# define JSON_WRITER_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include "value.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +# include +# include +# include + +namespace Json { + + class Value; + + /** \brief Abstract class for writers. + */ + class JSON_API Writer + { + public: + virtual ~Writer(); + + virtual std::string write( const Value &root ) = 0; + }; + + /** \brief Outputs a Value in JSON format without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + */ + class JSON_API FastWriter : public Writer + { + public: + FastWriter(); + virtual ~FastWriter(){} + + void enableYAMLCompatibility(); + + public: // overridden from Writer + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + + std::string document_; + bool yamlCompatiblityEnabled_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way. 
+ * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledWriter: public Writer + { + public: + StyledWriter(); + virtual ~StyledWriter(){} + + public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + virtual std::string write( const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; + }; + + /** \brief Writes a Value in JSON format in a human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value types, + * and all the values fit on one lines, then print the array on a single line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + */ + class JSON_API StyledStreamWriter + { + public: + StyledStreamWriter( std::string indentation="\t" ); + ~StyledStreamWriter(){} + + public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not return a value. 
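+     * A possible calling sketch (illustrative only: the output file name and the 'root'
+     * value are assumptions, not part of this header):
+     * \code
+     *   Json::StyledStreamWriter writer( "   " );
+     *   std::ofstream out( "config.json" );
+     *   writer.write( out, root );
+     * \endcode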
+ */ + void write( std::ostream &out, const Value &root ); + + private: + void writeValue( const Value &value ); + void writeArrayValue( const Value &value ); + bool isMultineArray( const Value &value ); + void pushValue( const std::string &value ); + void writeIndent(); + void writeWithIndent( const std::string &value ); + void indent(); + void unindent(); + void writeCommentBeforeValue( const Value &root ); + void writeCommentAfterValueOnSameLine( const Value &root ); + bool hasCommentForValue( const Value &value ); + static std::string normalizeEOL( const std::string &text ); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_; + }; + +# if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( Int value ); + std::string JSON_API valueToString( UInt value ); +# endif // if defined(JSON_HAS_INT64) + std::string JSON_API valueToString( LargestInt value ); + std::string JSON_API valueToString( LargestUInt value ); + std::string JSON_API valueToString( double value ); + std::string JSON_API valueToString( bool value ); + std::string JSON_API valueToQuotedString( const char *value ); + + /// \brief Output using the StyledStreamWriter. + /// \see Json::operator>>() + std::ostream& operator<<( std::ostream&, const Value &root ); + +} // namespace Json + + + +#endif // JSON_WRITER_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln new file mode 100644 index 0000000..5bfa366 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsoncpp.sln @@ -0,0 +1,46 @@ +Microsoft Visual Studio Solution File, Format Version 8.00 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcproj", "{B84F7231-16CE-41D8-8C08-7B523FF4225B}" + ProjectSection(ProjectDependencies) = postProject + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}" + ProjectSection(ProjectDependencies) = postProject + {B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B} + EndProjectSection +EndProject +Global + GlobalSection(SolutionConfiguration) = preSolution + Debug = Debug + dummy = dummy + Release = Release + EndGlobalSection + GlobalSection(ProjectConfiguration) = postSolution + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.ActiveCfg = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Debug.Build.0 = Debug|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.ActiveCfg = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.dummy.Build.0 = dummy|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.ActiveCfg = Release|Win32 + {B84F7231-16CE-41D8-8C08-7B523FF4225B}.Release.Build.0 = Release|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Debug.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.ActiveCfg = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32 + {25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32 + 
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32 + {B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32 + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + EndGlobalSection + GlobalSection(ExtensibilityAddIns) = postSolution + EndGlobalSection +EndGlobal diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj new file mode 100644 index 0000000..99a4dd6 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/jsontest.vcproj @@ -0,0 +1,119 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj new file mode 100644 index 0000000..2d7bf99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/lib_json.vcproj @@ -0,0 +1,214 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj new file mode 100644 index 0000000..df36700 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makefiles/vs71/test_lib_json.vcproj @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tags/jsoncpp/0.6.0-rc2/makerelease.py b/tags/jsoncpp/0.6.0-rc2/makerelease.py new file mode 100644 index 0000000..6b8eec3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/makerelease.py @@ -0,0 +1,380 @@ +"""Tag the sandbox for release, make source and doc tarballs. 
+ +Requires Python 2.6 + +Example of invocation (use to test the script): +python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev + +When testing this script: +python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev + +Example of invocation when doing a release: +python makerelease.py 0.5.0 0.6.0-dev +""" +import os.path +import subprocess +import sys +import doxybuild +import subprocess +import xml.etree.ElementTree as ElementTree +import shutil +import urllib2 +import tempfile +import os +import time +from devtools import antglob, fixeol, tarball +import amalgamate + +SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' +SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' +SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' +SOURCEFORGE_PROJECT = 'jsoncpp' + +def set_version( version ): + with open('version','wb') as f: + f.write( version.strip() ) + +def rmdir_if_exist( dir_path ): + if os.path.isdir( dir_path ): + shutil.rmtree( dir_path ) + +class SVNError(Exception): + pass + +def svn_command( command, *args ): + cmd = ['svn', '--non-interactive', command] + list(args) + print 'Running:', ' '.join( cmd ) + process = subprocess.Popen( cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + error = SVNError( 'SVN command failed:\n' + stdout ) + error.returncode = process.returncode + raise error + return stdout + +def check_no_pending_commit(): + """Checks that there is no pending commit in the sandbox.""" + stdout = svn_command( 'status', '--xml' ) + etree = ElementTree.fromstring( stdout ) + msg = [] + for entry in etree.getiterator( 'entry' ): + path = entry.get('path') + status = entry.find('wc-status').get('item') + if status != 'unversioned' and path != 'version': + msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) + if msg: + msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) + return '\n'.join( msg ) + +def svn_join_url( base_url, suffix ): + if not base_url.endswith('/'): + base_url += '/' + if suffix.startswith('/'): + suffix = suffix[1:] + return base_url + suffix + +def svn_check_if_tag_exist( tag_url ): + """Checks if a tag exist. + Returns: True if the tag exist, False otherwise. + """ + try: + list_stdout = svn_command( 'list', tag_url ) + except SVNError, e: + if e.returncode != 1 or not str(e).find('tag_url'): + raise e + # otherwise ignore error, meaning tag does not exist + return False + return True + +def svn_commit( message ): + """Commit the sandbox, providing the specified comment. + """ + svn_command( 'ci', '-m', message ) + +def svn_tag_sandbox( tag_url, message ): + """Makes a tag based on the sandbox revisions. + """ + svn_command( 'copy', '-m', message, '.', tag_url ) + +def svn_remove_tag( tag_url, message ): + """Removes an existing tag. + """ + svn_command( 'delete', '-m', message, tag_url ) + +def svn_export( tag_url, export_dir ): + """Exports the tag_url revision to export_dir. + Target directory, including its parent is created if it does not exist. + If the directory export_dir exist, it is deleted before export proceed. + """ + rmdir_if_exist( export_dir ) + svn_command( 'export', tag_url, export_dir ) + +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' 
+ prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + +def download( url, target_path ): + """Download file represented by url to target_path. + """ + f = urllib2.urlopen( url ) + try: + data = f.read() + finally: + f.close() + fout = open( target_path, 'wb' ) + try: + fout.write( data ) + finally: + fout.close() + +def check_compile( distcheck_top_dir, platform ): + cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] + print 'Running:', ' '.join( cmd ) + log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) + flog = open( log_path, 'wb' ) + try: + process = subprocess.Popen( cmd, + stdout=flog, + stderr=subprocess.STDOUT, + cwd=distcheck_top_dir ) + stdout = process.communicate()[0] + status = (process.returncode == 0) + finally: + flog.close() + return (status, log_path) + +def write_tempfile( content, **kwargs ): + fd, path = tempfile.mkstemp( **kwargs ) + f = os.fdopen( fd, 'wt' ) + try: + f.write( content ) + finally: + f.close() + return path + +class SFTPError(Exception): + pass + +def run_sftp_batch( userhost, sftp, batch, retry=0 ): + path = write_tempfile( batch, suffix='.sftp', text=True ) + # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc + cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] + error = None + for retry_index in xrange(0, max(1,retry)): + heading = retry_index == 0 and 'Running:' or 'Retrying:' + print heading, ' '.join( cmd ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode != 0: + error = SFTPError( 'SFTP batch failed:\n' + stdout ) + else: + break + if error: + raise error + return stdout + +def sourceforge_web_synchro( sourceforge_project, doc_dir, + user=None, sftp='sftp' ): + """Notes: does not synchronize sub-directory of doc-dir. 
+ """ + userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) + stdout = run_sftp_batch( userhost, sftp, """ +cd htdocs +dir +exit +""" ) + existing_paths = set() + collect = 0 + for line in stdout.split('\n'): + line = line.strip() + if not collect and line.endswith('> dir'): + collect = True + elif collect and line.endswith('> exit'): + break + elif collect == 1: + collect = 2 + elif collect == 2: + path = line.strip().split()[-1:] + if path and path[0] not in ('.', '..'): + existing_paths.add( path[0] ) + upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) + paths_to_remove = existing_paths - upload_paths + if paths_to_remove: + print 'Removing the following file from web:' + print '\n'.join( paths_to_remove ) + stdout = run_sftp_batch( userhost, sftp, """cd htdocs +rm %s +exit""" % ' '.join(paths_to_remove) ) + print 'Uploading %d files:' % len(upload_paths) + batch_size = 10 + upload_paths = list(upload_paths) + start_time = time.time() + for index in xrange(0,len(upload_paths),batch_size): + paths = upload_paths[index:index+batch_size] + file_per_sec = (time.time() - start_time) / (index+1) + remaining_files = len(upload_paths) - index + remaining_sec = file_per_sec * remaining_files + print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec) + run_sftp_batch( userhost, sftp, """cd htdocs +lcd %s +mput %s +exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) + +def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): + userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) + run_sftp_batch( userhost, sftp, """ +mput %s +exit +""" % (' '.join(paths),) ) + + +def main(): + usage = """%prog release_version next_dev_version +Update 'version' file to release_version and commit. +Generates the document tarball. +Tags the sandbox revision with release_version. +Update 'version' file to next_dev_version and commit. + +Performs an svn export of tag release version, and build a source tarball. + +Must be started in the project top directory. + +Warning: --force should only be used when developping/testing the release script. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'), + help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") + parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'), + help="""Path to Doxygen tool. [Default: %default]""") + parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False, + help="""Ignore pending commit. [Default: %default]""") + parser.add_option('--retag', dest="retag_release", action='store_true', default=False, + help="""Overwrite release existing tag if it exist. 
[Default: %default]""") + parser.add_option('-p', '--platforms', dest="platforms", action='store', default='', + help="""Comma separated list of platform passed to scons for build check.""") + parser.add_option('--no-test', dest="no_test", action='store_true', default=False, + help="""Skips build check.""") + parser.add_option('--no-web', dest="no_web", action='store_true', default=False, + help="""Do not update web site.""") + parser.add_option('-u', '--upload-user', dest="user", action='store', + help="""Sourceforge user for SFTP documentation upload.""") + parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'), + help="""Path of the SFTP compatible binary used to upload the documentation.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 2: + parser.error( 'release_version missing on command-line.' ) + release_version = args[0] + next_version = args[1] + + if not options.platforms and not options.no_test: + parser.error( 'You must specify either --platform or --no-test option.' ) + + if options.ignore_pending_commit: + msg = '' + else: + msg = check_no_pending_commit() + if not msg: + print 'Setting version to', release_version + set_version( release_version ) + svn_commit( 'Release ' + release_version ) + tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) + if svn_check_if_tag_exist( tag_url ): + if options.retag_release: + svn_remove_tag( tag_url, 'Overwriting previous tag' ) + else: + print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url + sys.exit( 1 ) + svn_tag_sandbox( tag_url, 'Release ' + release_version ) + + print 'Generated doxygen document...' +## doc_dirname = r'jsoncpp-api-html-0.5.0' +## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' + doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) + doc_distcheck_dir = 'dist/doccheck' + tarball.decompress( doc_tarball_path, doc_distcheck_dir ) + doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) + + export_dir = 'dist/export' + svn_export( tag_url, export_dir ) + fix_sources_eol( export_dir ) + + source_dir = 'jsoncpp-src-' + release_version + source_tarball_path = 'dist/%s.tar.gz' % source_dir + print 'Generating source tarball to', source_tarball_path + tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) + + amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir + print 'Generating amalgamation source tarball to', amalgamation_tarball_path + amalgamation_dir = 'dist/amalgamation' + amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) + amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version + tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], + amalgamation_dir, prefix_dir=amalgamation_source_dir ) + + # Decompress source tarball, download and install scons-local + distcheck_dir = 'dist/distcheck' + distcheck_top_dir = distcheck_dir + '/' + source_dir + print 'Decompressing source tarball to', distcheck_dir + rmdir_if_exist( distcheck_dir ) + tarball.decompress( source_tarball_path, distcheck_dir ) + scons_local_path = 'dist/scons-local.tar.gz' + print 'Downloading scons-local to', scons_local_path + download( SCONS_LOCAL_URL, scons_local_path ) + print 'Decompressing scons-local to', distcheck_top_dir + tarball.decompress( scons_local_path, distcheck_top_dir ) + + # Run compilation + 
print 'Compiling decompressed tarball' + all_build_status = True + for platform in options.platforms.split(','): + print 'Testing platform:', platform + build_status, log_path = check_compile( distcheck_top_dir, platform ) + print 'see build log:', log_path + print build_status and '=> ok' or '=> FAILED' + all_build_status = all_build_status and build_status + if not build_status: + print 'Testing failed on at least one platform, aborting...' + svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) + sys.exit(1) + if options.user: + if not options.no_web: + print 'Uploading documentation using user', options.user + sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) + print 'Completed documentation upload' + print 'Uploading source and documentation tarballs for release using user', options.user + sourceforge_release_tarball( SOURCEFORGE_PROJECT, + [source_tarball_path, doc_tarball_path], + user=options.user, sftp=options.sftp ) + print 'Source and doc release tarballs uploaded' + else: + print 'No upload user specified. Web site and download tarbal were not uploaded.' + print 'Tarball can be found at:', doc_tarball_path + + # Set next version number and commit + set_version( next_version ) + svn_commit( 'Released ' + release_version ) + else: + sys.stderr.write( msg + '\n' ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py new file mode 100644 index 0000000..8ee3cbb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/globtool.py @@ -0,0 +1,53 @@ +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment + Environment.Glob = Glob + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py new file mode 100644 index 0000000..864ff40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/srcdist.py @@ -0,0 +1,179 @@ +import os +import os.path +from fnmatch import fnmatch +import targz + +##def DoxyfileParse(file_contents): +## """ +## Parse a Doxygen source file and return a dictionary of all the values. +## Values will be strings and lists of strings. 
+## """ +## data = {} +## +## import shlex +## lex = shlex.shlex(instream = file_contents, posix = True) +## lex.wordchars += "*+./-:" +## lex.whitespace = lex.whitespace.replace("\n", "") +## lex.escape = "" +## +## lineno = lex.lineno +## last_backslash_lineno = lineno +## token = lex.get_token() +## key = token # the first token should be a key +## last_token = "" +## key_token = False +## next_key = False +## new_data = True +## +## def append_data(data, key, new_data, token): +## if new_data or len(data[key]) == 0: +## data[key].append(token) +## else: +## data[key][-1] += token +## +## while token: +## if token in ['\n']: +## if last_token not in ['\\']: +## key_token = True +## elif token in ['\\']: +## pass +## elif key_token: +## key = token +## key_token = False +## else: +## if token == "+=": +## if not data.has_key(key): +## data[key] = list() +## elif token == "=": +## data[key] = list() +## else: +## append_data( data, key, new_data, token ) +## new_data = True +## +## last_token = token +## token = lex.get_token() +## +## if last_token == '\\' and token != '\n': +## new_data = False +## append_data( data, key, new_data, '\\' ) +## +## # compress lists of len 1 into single strings +## for (k, v) in data.items(): +## if len(v) == 0: +## data.pop(k) +## +## # items in the following list will be kept as lists and not converted to strings +## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]: +## continue +## +## if len(v) == 1: +## data[k] = v[0] +## +## return data +## +##def DoxySourceScan(node, env, path): +## """ +## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add +## any files used to generate docs to the list of source files. +## """ +## default_file_patterns = [ +## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx', +## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++', +## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm', +## '*.py', +## ] +## +## default_exclude_patterns = [ +## '*~', +## ] +## +## sources = [] +## +## data = DoxyfileParse(node.get_contents()) +## +## if data.get("RECURSIVE", "NO") == "YES": +## recursive = True +## else: +## recursive = False +## +## file_patterns = data.get("FILE_PATTERNS", default_file_patterns) +## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns) +## +## for node in data.get("INPUT", []): +## if os.path.isfile(node): +## sources.add(node) +## elif os.path.isdir(node): +## if recursive: +## for root, dirs, files in os.walk(node): +## for f in files: +## filename = os.path.join(root, f) +## +## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False) +## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True) +## +## if pattern_check and not exclude_check: +## sources.append(filename) +## else: +## for pattern in file_patterns: +## sources.extend(glob.glob("/".join([node, pattern]))) +## sources = map( lambda path: env.File(path), sources ) +## return sources +## +## +##def DoxySourceScanCheck(node, env): +## """Check if we should scan this file""" +## return os.path.isfile(node.path) + +def srcDistEmitter(source, target, env): +## """Doxygen Doxyfile emitter""" +## # possible output formats and their default values and output locations +## output_formats = { +## "HTML": ("YES", "html"), +## "LATEX": ("YES", "latex"), +## "RTF": ("NO", "rtf"), +## "MAN": ("YES", "man"), +## "XML": ("NO", "xml"), +## } +## +## data = DoxyfileParse(source[0].get_contents()) +## 
+## targets = [] +## out_dir = data.get("OUTPUT_DIRECTORY", ".") +## +## # add our output locations +## for (k, v) in output_formats.items(): +## if data.get("GENERATE_" + k, v[0]) == "YES": +## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) +## +## # don't clobber targets +## for node in targets: +## env.Precious(node) +## +## # set up cleaning stuff +## for node in targets: +## env.Clean(node, node) +## +## return (targets, source) + return (target,source) + +def generate(env): + """ + Add builders and construction variables for the + SrcDist tool. + """ +## doxyfile_scanner = env.Scanner( +## DoxySourceScan, +## "DoxySourceScan", +## scan_check = DoxySourceScanCheck, +## ) + + if targz.exists(env): + srcdist_builder = targz.makeBuilder( srcDistEmitter ) + + env['BUILDERS']['SrcDist'] = srcdist_builder + +def exists(env): + """ + Make sure srcdist exists. + """ + return targz.exists(env) diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py new file mode 100644 index 0000000..4d30585 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/substinfile.py @@ -0,0 +1,79 @@ +import re +from SCons.Script import * # the usual scons stuff you get in a SConscript + +def generate(env): + """ + Add builders and construction variables for the + SubstInFile tool. + + Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT + from the source to the target. + The values of SUBST_DICT first have any construction variables expanded + (its keys are not expanded). + If a value of SUBST_DICT is a python callable function, it is called and + the result is expanded as the value. + If there's more than one source and more than one target, each target gets + substituted from the corresponding source. + """ + def do_subst_in_file(targetfile, sourcefile, dict): + """Replace all instances of the keys of dict with their values. + For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, + then all instances of %VERSION% in the file will be replaced with 1.2345 etc. + """ + try: + f = open(sourcefile, 'rb') + contents = f.read() + f.close() + except: + raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile + for (k,v) in dict.items(): + contents = re.sub(k, v, contents) + try: + f = open(targetfile, 'wb') + f.write(contents) + f.close() + except: + raise SCons.Errors.UserError, "Can't write target file %s"%targetfile + return 0 # success + + def subst_in_file(target, source, env): + if not env.has_key('SUBST_DICT'): + raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set." + d = dict(env['SUBST_DICT']) # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()).replace('\\','\\\\') + elif SCons.Util.is_String(v): + d[k] = env.subst(v).replace('\\','\\\\') + else: + raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)) + for (t,s) in zip(target, source): + return do_subst_in_file(str(t), str(s), d) + + def subst_in_file_string(target, source, env): + """This is what gets printed on the console.""" + return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t)) + for (t,s) in zip(target, source)]) + + def subst_emitter(target, source, env): + """Add dependency from substituted SUBST_DICT to target. + Returns original target, source tuple unchanged. 
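+        Depending on a Value node built from SUBST_DICT makes the target
+        rebuild whenever a substituted value changes, not only when the
+        source template changes.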
+ """ + d = env['SUBST_DICT'].copy() # copy it + for (k,v) in d.items(): + if callable(v): + d[k] = env.subst(v()) + elif SCons.Util.is_String(v): + d[k]=env.subst(v) + Depends(target, SCons.Node.Python.Value(d)) + return target, source + +## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!? + subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) + env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) + +def exists(env): + """ + Make sure tool exists. + """ + return True diff --git a/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py b/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py new file mode 100644 index 0000000..f543200 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/scons-tools/targz.py @@ -0,0 +1,82 @@ +"""tarball + +Tool-specific initialization for tarball. + +""" + +## Commands to tackle a command based implementation: +##to unpack on the fly... +##gunzip < FILE.tar.gz | tar xvf - +##to pack on the fly... +##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz + +import os.path + +import SCons.Builder +import SCons.Node.FS +import SCons.Util + +try: + import gzip + import tarfile + internal_targz = 1 +except ImportError: + internal_targz = 0 + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +if internal_targz: + def targz(target, source, env): + def archive_name( path ): + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + return archive_name + + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + tar.add(path, archive_name(path) ) + compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) + base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) + target_path = str(target[0]) + fileobj = gzip.GzipFile( target_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) + for source in source: + source_path = str(source) + if source.isdir(): + os.path.walk(source_path, visit, tar) + else: + tar.add(source_path, archive_name(source_path) ) # filename, arcname + tar.close() + + targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) + + def makeBuilder( emitter = None ): + return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), + source_factory = SCons.Node.FS.Entry, + source_scanner = SCons.Defaults.DirScanner, + suffix = '$TARGZ_SUFFIX', + multi = 1) + TarGzBuilder = makeBuilder() + + def generate(env): + """Add Builders and construction variables for zip to an Environment. + The following environnement variables may be set: + TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level). + TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative + to something other than top-dir). + """ + env['BUILDERS']['TarGz'] = TarGzBuilder + env['TARGZ_COM'] = targzAction + env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9 + env['TARGZ_SUFFIX'] = '.tar.gz' + env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory. 
+else: + def generate(env): + pass + + +def exists(env): + return internal_targz diff --git a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp new file mode 100644 index 0000000..dfb6150 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/main.cpp @@ -0,0 +1,269 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +/* This executable is used for testing parser/writer using real JSON files. + */ + + +#include +#include // sort +#include + +#if defined(_MSC_VER) && _MSC_VER >= 1310 +# pragma warning( disable: 4996 ) // disable fopen deprecation warning +#endif + +static std::string +readInputTestFile( const char *path ) +{ + FILE *file = fopen( path, "rb" ); + if ( !file ) + return std::string(""); + fseek( file, 0, SEEK_END ); + long size = ftell( file ); + fseek( file, 0, SEEK_SET ); + std::string text; + char *buffer = new char[size+1]; + buffer[size] = 0; + if ( fread( buffer, 1, size, file ) == (unsigned long)size ) + text = buffer; + fclose( file ); + delete[] buffer; + return text; +} + + +static void +printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) +{ + switch ( value.type() ) + { + case Json::nullValue: + fprintf( fout, "%s=null\n", path.c_str() ); + break; + case Json::intValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() ); + break; + case Json::uintValue: + fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); + break; + case Json::realValue: + fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + break; + case Json::stringValue: + fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); + break; + case Json::booleanValue: + fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" ); + break; + case Json::arrayValue: + { + fprintf( fout, "%s=[]\n", path.c_str() ); + int size = value.size(); + for ( int index =0; index < size; ++index ) + { + static char buffer[16]; + sprintf( buffer, "[%d]", index ); + printValueTree( fout, value[index], path + buffer ); + } + } + break; + case Json::objectValue: + { + fprintf( fout, "%s={}\n", path.c_str() ); + Json::Value::Members members( value.getMemberNames() ); + std::sort( members.begin(), members.end() ); + std::string suffix = *(path.end()-1) == '.' ? 
"" : "."; + for ( Json::Value::Members::iterator it = members.begin(); + it != members.end(); + ++it ) + { + const std::string &name = *it; + printValueTree( fout, value[name], path + suffix + name ); + } + } + break; + default: + break; + } +} + + +static int +parseAndSaveValueTree( const std::string &input, + const std::string &actual, + const std::string &kind, + Json::Value &root, + const Json::Features &features, + bool parseOnly ) +{ + Json::Reader reader( features ); + bool parsingSuccessful = reader.parse( input, root ); + if ( !parsingSuccessful ) + { + printf( "Failed to parse %s file: \n%s\n", + kind.c_str(), + reader.getFormattedErrorMessages().c_str() ); + return 1; + } + + if ( !parseOnly ) + { + FILE *factual = fopen( actual.c_str(), "wt" ); + if ( !factual ) + { + printf( "Failed to create %s actual file.\n", kind.c_str() ); + return 2; + } + printValueTree( factual, root ); + fclose( factual ); + } + return 0; +} + + +static int +rewriteValueTree( const std::string &rewritePath, + const Json::Value &root, + std::string &rewrite ) +{ + //Json::FastWriter writer; + //writer.enableYAMLCompatibility(); + Json::StyledWriter writer; + rewrite = writer.write( root ); + FILE *fout = fopen( rewritePath.c_str(), "wt" ); + if ( !fout ) + { + printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() ); + return 2; + } + fprintf( fout, "%s\n", rewrite.c_str() ); + fclose( fout ); + return 0; +} + + +static std::string +removeSuffix( const std::string &path, + const std::string &extension ) +{ + if ( extension.length() >= path.length() ) + return std::string(""); + std::string suffix = path.substr( path.length() - extension.length() ); + if ( suffix != extension ) + return std::string(""); + return path.substr( 0, path.length() - extension.length() ); +} + + +static void +printConfig() +{ + // Print the configuration used to compile JsonCpp +#if defined(JSON_NO_INT64) + printf( "JSON_NO_INT64=1\n" ); +#else + printf( "JSON_NO_INT64=0\n" ); +#endif +} + + +static int +printUsage( const char *argv[] ) +{ + printf( "Usage: %s [--strict] input-json-file", argv[0] ); + return 3; +} + + +int +parseCommandLine( int argc, const char *argv[], + Json::Features &features, std::string &path, + bool &parseOnly ) +{ + parseOnly = false; + if ( argc < 2 ) + { + return printUsage( argv ); + } + + int index = 1; + if ( std::string(argv[1]) == "--json-checker" ) + { + features = Json::Features::strictMode(); + parseOnly = true; + ++index; + } + + if ( std::string(argv[1]) == "--json-config" ) + { + printConfig(); + return 3; + } + + if ( index == argc || index + 1 < argc ) + { + return printUsage( argv ); + } + + path = argv[index]; + return 0; +} + + +int main( int argc, const char *argv[] ) +{ + std::string path; + Json::Features features; + bool parseOnly; + int exitCode = parseCommandLine( argc, argv, features, path, parseOnly ); + if ( exitCode != 0 ) + { + return exitCode; + } + + try + { + std::string input = readInputTestFile( path.c_str() ); + if ( input.empty() ) + { + printf( "Failed to read input or empty input: %s\n", path.c_str() ); + return 3; + } + + std::string basePath = removeSuffix( argv[1], ".json" ); + if ( !parseOnly && basePath.empty() ) + { + printf( "Bad input path. 
Path does not end with '.expected':\n%s\n", path.c_str() ); + return 3; + } + + std::string actualPath = basePath + ".actual"; + std::string rewritePath = basePath + ".rewrite"; + std::string rewriteActualPath = basePath + ".actual-rewrite"; + + Json::Value root; + exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly ); + if ( exitCode == 0 && !parseOnly ) + { + std::string rewrite; + exitCode = rewriteValueTree( rewritePath, root, rewrite ); + if ( exitCode == 0 ) + { + Json::Value rewriteRoot; + exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath, + "rewrite", rewriteRoot, features, parseOnly ); + } + } + } + catch ( const std::exception &e ) + { + printf( "Unhandled exception:\n%s\n", e.what() ); + exitCode = 1; + } + + return exitCode; +} + diff --git a/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript new file mode 100644 index 0000000..6e68e31 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/jsontestrunner/sconscript @@ -0,0 +1,9 @@ +Import( 'env_testing buildJSONTests' ) + +buildJSONTests( env_testing, Split( """ + main.cpp + """ ), + 'jsontestrunner' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('jsontestrunner', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h new file mode 100644 index 0000000..173e2ed --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_batchallocator.h @@ -0,0 +1,130 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED +# define JSONCPP_BATCHALLOCATOR_H_INCLUDED + +# include +# include + +# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION + +namespace Json { + +/* Fast memory allocator. + * + * This memory allocator allocates memory for a batch of object (specified by + * the page size, the number of object in each page). + * + * It does not allow the destruction of a single object. All the allocated objects + * can be destroyed at once. The memory can be either released or reused for future + * allocation. + * + * The in-place new operator must be used to construct the object using the pointer + * returned by allocate. + */ +template +class BatchAllocator +{ +public: + typedef AllocatedType Type; + + BatchAllocator( unsigned int objectsPerPage = 255 ) + : freeHead_( 0 ) + , objectsPerPage_( objectsPerPage ) + { +// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() ); + assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space. + assert( objectsPerPage >= 16 ); + batches_ = allocateBatch( 0 ); // allocated a dummy page + currentBatch_ = batches_; + } + + ~BatchAllocator() + { + for ( BatchInfo *batch = batches_; batch; ) + { + BatchInfo *nextBatch = batch->next_; + free( batch ); + batch = nextBatch; + } + } + + /// allocate space for an array of objectPerAllocation object. + /// @warning it is the responsability of the caller to call objects constructors. + AllocatedType *allocate() + { + if ( freeHead_ ) // returns node from free list. 
+ { + AllocatedType *object = freeHead_; + freeHead_ = *(AllocatedType **)object; + return object; + } + if ( currentBatch_->used_ == currentBatch_->end_ ) + { + currentBatch_ = currentBatch_->next_; + while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ ) + currentBatch_ = currentBatch_->next_; + + if ( !currentBatch_ ) // no free batch found, allocate a new one + { + currentBatch_ = allocateBatch( objectsPerPage_ ); + currentBatch_->next_ = batches_; // insert at the head of the list + batches_ = currentBatch_; + } + } + AllocatedType *allocated = currentBatch_->used_; + currentBatch_->used_ += objectPerAllocation; + return allocated; + } + + /// Release the object. + /// @warning it is the responsability of the caller to actually destruct the object. + void release( AllocatedType *object ) + { + assert( object != 0 ); + *(AllocatedType **)object = freeHead_; + freeHead_ = object; + } + +private: + struct BatchInfo + { + BatchInfo *next_; + AllocatedType *used_; + AllocatedType *end_; + AllocatedType buffer_[objectPerAllocation]; + }; + + // disabled copy constructor and assignement operator. + BatchAllocator( const BatchAllocator & ); + void operator =( const BatchAllocator &); + + static BatchInfo *allocateBatch( unsigned int objectsPerPage ) + { + const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation + + sizeof(AllocatedType) * objectPerAllocation * objectsPerPage; + BatchInfo *batch = static_cast( malloc( mallocSize ) ); + batch->next_ = 0; + batch->used_ = batch->buffer_; + batch->end_ = batch->buffer_ + objectsPerPage; + return batch; + } + + BatchInfo *batches_; + BatchInfo *currentBatch_; + /// Head of a single linked list within the allocated space of freeed object + AllocatedType *freeHead_; + unsigned int objectsPerPage_; +}; + + +} // namespace Json + +# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION + +#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED + diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl new file mode 100644 index 0000000..3a532ad --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalarray.inl @@ -0,0 +1,456 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueArrayAllocator::~ValueArrayAllocator() +{ +} + +// ////////////////////////////////////////////////////////////////// +// class DefaultValueArrayAllocator +// ////////////////////////////////////////////////////////////////// +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + return new ValueInternalArray(); + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + return new ValueInternalArray( other ); + } + + virtual void destructArray( ValueInternalArray *array ) + { + delete array; + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + free( value ); + } +}; + +#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueArrayAllocator : public ValueArrayAllocator +{ +public: // overridden from ValueArrayAllocator + virtual ~DefaultValueArrayAllocator() + { + } + + virtual ValueInternalArray *newArray() + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray(); // placement new + return array; + } + + virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) + { + ValueInternalArray *array = arraysAllocator_.allocate(); + new (array) ValueInternalArray( other ); // placement new + return array; + } + + virtual void destructArray( ValueInternalArray *array ) + { + if ( array ) + { + array->~ValueInternalArray(); + arraysAllocator_.release( array ); + } + } + + virtual void reallocateArrayPageIndex( Value **&indexes, + ValueInternalArray::PageIndex &indexCount, + ValueInternalArray::PageIndex minNewIndexCount ) + { + ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1; + if ( minNewIndexCount > newIndexCount ) + newIndexCount = minNewIndexCount; + void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); + if ( !newIndexes ) + throw std::bad_alloc(); + indexCount = newIndexCount; + indexes = static_cast( newIndexes ); + } + 
virtual void releaseArrayPageIndex( Value **indexes, + ValueInternalArray::PageIndex indexCount ) + { + if ( indexes ) + free( indexes ); + } + + virtual Value *allocateArrayPage() + { + return static_cast( pagesAllocator_.allocate() ); + } + + virtual void releaseArrayPage( Value *value ) + { + if ( value ) + pagesAllocator_.release( value ); + } +private: + BatchAllocator arraysAllocator_; + BatchAllocator pagesAllocator_; +}; +#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR + +static ValueArrayAllocator *&arrayAllocator() +{ + static DefaultValueArrayAllocator defaultAllocator; + static ValueArrayAllocator *arrayAllocator = &defaultAllocator; + return arrayAllocator; +} + +static struct DummyArrayAllocatorInitializer { + DummyArrayAllocatorInitializer() + { + arrayAllocator(); // ensure arrayAllocator() statics are initialized before main(). + } +} dummyArrayAllocatorInitializer; + +// ////////////////////////////////////////////////////////////////// +// class ValueInternalArray +// ////////////////////////////////////////////////////////////////// +bool +ValueInternalArray::equals( const IteratorState &x, + const IteratorState &other ) +{ + return x.array_ == other.array_ + && x.currentItemIndex_ == other.currentItemIndex_ + && x.currentPageIndex_ == other.currentPageIndex_; +} + + +void +ValueInternalArray::increment( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + != it.array_->size_, + "ValueInternalArray::increment(): moving iterator beyond end" ); + ++(it.currentItemIndex_); + if ( it.currentItemIndex_ == itemsPerPage ) + { + it.currentItemIndex_ = 0; + ++(it.currentPageIndex_); + } +} + + +void +ValueInternalArray::decrement( IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_ + && it.currentItemIndex_ == 0, + "ValueInternalArray::decrement(): moving iterator beyond end" ); + if ( it.currentItemIndex_ == 0 ) + { + it.currentItemIndex_ = itemsPerPage-1; + --(it.currentPageIndex_); + } + else + { + --(it.currentItemIndex_); + } +} + + +Value & +ValueInternalArray::unsafeDereference( const IteratorState &it ) +{ + return (*(it.currentPageIndex_))[it.currentItemIndex_]; +} + + +Value & +ValueInternalArray::dereference( const IteratorState &it ) +{ + JSON_ASSERT_MESSAGE( it.array_ && + (it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_ + < it.array_->size_, + "ValueInternalArray::dereference(): dereferencing invalid iterator" ); + return unsafeDereference( it ); +} + +void +ValueInternalArray::makeBeginIterator( IteratorState &it ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = 0; + it.currentPageIndex_ = pages_; +} + + +void +ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const +{ + it.array_ = const_cast( this ); + it.currentItemIndex_ = index % itemsPerPage; + it.currentPageIndex_ = pages_ + index / itemsPerPage; +} + + +void +ValueInternalArray::makeEndIterator( IteratorState &it ) const +{ + makeIterator( it, size_ ); +} + + +ValueInternalArray::ValueInternalArray() + : pages_( 0 ) + , size_( 0 ) + , pageCount_( 0 ) +{ +} + + +ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) + : pages_( 0 ) + , pageCount_( 0 ) + , size_( other.size_ ) +{ + PageIndex minNewPages = other.size_ / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, + 
"ValueInternalArray::reserve(): bad reallocation" ); + IteratorState itOther; + other.makeBeginIterator( itOther ); + Value *value; + for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) ) + { + if ( index % itemsPerPage == 0 ) + { + PageIndex pageIndex = index / itemsPerPage; + value = arrayAllocator()->allocateArrayPage(); + pages_[pageIndex] = value; + } + new (value) Value( dereference( itOther ) ); + } +} + + +ValueInternalArray & +ValueInternalArray::operator =( const ValueInternalArray &other ) +{ + ValueInternalArray temp( other ); + swap( temp ); + return *this; +} + + +ValueInternalArray::~ValueInternalArray() +{ + // destroy all constructed items + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + // release all pages + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + // release pages index + arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ ); +} + + +void +ValueInternalArray::swap( ValueInternalArray &other ) +{ + Value **tempPages = pages_; + pages_ = other.pages_; + other.pages_ = tempPages; + ArrayIndex tempSize = size_; + size_ = other.size_; + other.size_ = tempSize; + PageIndex tempPageCount = pageCount_; + pageCount_ = other.pageCount_; + other.pageCount_ = tempPageCount; +} + +void +ValueInternalArray::clear() +{ + ValueInternalArray dummy; + swap( dummy ); +} + + +void +ValueInternalArray::resize( ArrayIndex newSize ) +{ + if ( newSize == 0 ) + clear(); + else if ( newSize < size_ ) + { + IteratorState it; + IteratorState itEnd; + makeIterator( it, newSize ); + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + value->~Value(); + } + PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage; + PageIndex lastPageIndex = size_ / itemsPerPage; + for ( ; pageIndex < lastPageIndex; ++pageIndex ) + arrayAllocator()->releaseArrayPage( pages_[pageIndex] ); + size_ = newSize; + } + else if ( newSize > size_ ) + resolveReference( newSize ); +} + + +void +ValueInternalArray::makeIndexValid( ArrayIndex index ) +{ + // Need to enlarge page index ? + if ( index >= pageCount_ * itemsPerPage ) + { + PageIndex minNewPages = (index + 1) / itemsPerPage; + arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); + JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" ); + } + + // Need to allocate new pages ? + ArrayIndex nextPageIndex = + (size_ % itemsPerPage) != 0 ? 
size_ - (size_%itemsPerPage) + itemsPerPage + : size_; + if ( nextPageIndex <= index ) + { + PageIndex pageIndex = nextPageIndex / itemsPerPage; + PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1; + for ( ; pageToAllocate-- > 0; ++pageIndex ) + pages_[pageIndex] = arrayAllocator()->allocateArrayPage(); + } + + // Initialize all new entries + IteratorState it; + IteratorState itEnd; + makeIterator( it, size_ ); + size_ = index + 1; + makeIterator( itEnd, size_ ); + for ( ; !equals(it,itEnd); increment(it) ) + { + Value *value = &dereference(it); + new (value) Value(); // Construct a default value using placement new + } +} + +Value & +ValueInternalArray::resolveReference( ArrayIndex index ) +{ + if ( index >= size_ ) + makeIndexValid( index ); + return pages_[index/itemsPerPage][index%itemsPerPage]; +} + +Value * +ValueInternalArray::find( ArrayIndex index ) const +{ + if ( index >= size_ ) + return 0; + return &(pages_[index/itemsPerPage][index%itemsPerPage]); +} + +ValueInternalArray::ArrayIndex +ValueInternalArray::size() const +{ + return size_; +} + +int +ValueInternalArray::distance( const IteratorState &x, const IteratorState &y ) +{ + return indexOf(y) - indexOf(x); +} + + +ValueInternalArray::ArrayIndex +ValueInternalArray::indexOf( const IteratorState &iterator ) +{ + if ( !iterator.array_ ) + return ArrayIndex(-1); + return ArrayIndex( + (iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage + + iterator.currentItemIndex_ ); +} + + +int +ValueInternalArray::compare( const ValueInternalArray &other ) const +{ + int sizeDiff( size_ - other.size_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + + for ( ArrayIndex index =0; index < size_; ++index ) + { + int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare( + other.pages_[index/itemsPerPage][index%itemsPerPage] ); + if ( diff != 0 ) + return diff; + } + return 0; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl new file mode 100644 index 0000000..f2fa160 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_internalmap.inl @@ -0,0 +1,615 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueInternalMap +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) ); + * This optimization is used by the fast allocator. 
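+ * A link stores a fixed number of key/value slots (itemPerLink) and chains to
+ * the next link of the same hash bucket.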
+ */ +ValueInternalLink::ValueInternalLink() + : previous_( 0 ) + , next_( 0 ) +{ +} + +ValueInternalLink::~ValueInternalLink() +{ + for ( int index =0; index < itemPerLink; ++index ) + { + if ( !items_[index].isItemAvailable() ) + { + if ( !items_[index].isMemberNameStatic() ) + free( keys_[index] ); + } + else + break; + } +} + + + +ValueMapAllocator::~ValueMapAllocator() +{ +} + +#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + return new ValueInternalMap(); + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + return new ValueInternalMap( other ); + } + + virtual void destructMap( ValueInternalMap *map ) + { + delete map; + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + return new ValueInternalLink(); + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + delete link; + } +}; +#else +/// @todo make this thread-safe (lock when accessign batch allocator) +class DefaultValueMapAllocator : public ValueMapAllocator +{ +public: // overridden from ValueMapAllocator + virtual ValueInternalMap *newMap() + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap(); // placement new + return map; + } + + virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) + { + ValueInternalMap *map = mapsAllocator_.allocate(); + new (map) ValueInternalMap( other ); // placement new + return map; + } + + virtual void destructMap( ValueInternalMap *map ) + { + if ( map ) + { + map->~ValueInternalMap(); + mapsAllocator_.release( map ); + } + } + + virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) + { + return new ValueInternalLink[size]; + } + + virtual void releaseMapBuckets( ValueInternalLink *links ) + { + delete [] links; + } + + virtual ValueInternalLink *allocateMapLink() + { + ValueInternalLink *link = linksAllocator_.allocate(); + memset( link, 0, sizeof(ValueInternalLink) ); + return link; + } + + virtual void releaseMapLink( ValueInternalLink *link ) + { + link->~ValueInternalLink(); + linksAllocator_.release( link ); + } +private: + BatchAllocator mapsAllocator_; + BatchAllocator linksAllocator_; +}; +#endif + +static ValueMapAllocator *&mapAllocator() +{ + static DefaultValueMapAllocator defaultAllocator; + static ValueMapAllocator *mapAllocator = &defaultAllocator; + return mapAllocator; +} + +static struct DummyMapAllocatorInitializer { + DummyMapAllocatorInitializer() + { + mapAllocator(); // ensure mapAllocator() statics are initialized before main(). + } +} dummyMapAllocatorInitializer; + + + +// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32. + +/* +use linked list hash map. +buckets array is a container. +linked list element contains 6 key/values. 
(memory = (16+4) * 6 + 4 = 124) +value have extra state: valid, available, deleted +*/ + + +ValueInternalMap::ValueInternalMap() + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ +} + + +ValueInternalMap::ValueInternalMap( const ValueInternalMap &other ) + : buckets_( 0 ) + , tailLink_( 0 ) + , bucketsSize_( 0 ) + , itemCount_( 0 ) +{ + reserve( other.itemCount_ ); + IteratorState it; + IteratorState itEnd; + other.makeBeginIterator( it ); + other.makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + bool isStatic; + const char *memberName = key( it, isStatic ); + const Value &aValue = value( it ); + resolveReference(memberName, isStatic) = aValue; + } +} + + +ValueInternalMap & +ValueInternalMap::operator =( const ValueInternalMap &other ) +{ + ValueInternalMap dummy( other ); + swap( dummy ); + return *this; +} + + +ValueInternalMap::~ValueInternalMap() +{ + if ( buckets_ ) + { + for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex ) + { + ValueInternalLink *link = buckets_[bucketIndex].next_; + while ( link ) + { + ValueInternalLink *linkToRelease = link; + link = link->next_; + mapAllocator()->releaseMapLink( linkToRelease ); + } + } + mapAllocator()->releaseMapBuckets( buckets_ ); + } +} + + +void +ValueInternalMap::swap( ValueInternalMap &other ) +{ + ValueInternalLink *tempBuckets = buckets_; + buckets_ = other.buckets_; + other.buckets_ = tempBuckets; + ValueInternalLink *tempTailLink = tailLink_; + tailLink_ = other.tailLink_; + other.tailLink_ = tempTailLink; + BucketIndex tempBucketsSize = bucketsSize_; + bucketsSize_ = other.bucketsSize_; + other.bucketsSize_ = tempBucketsSize; + BucketIndex tempItemCount = itemCount_; + itemCount_ = other.itemCount_; + other.itemCount_ = tempItemCount; +} + + +void +ValueInternalMap::clear() +{ + ValueInternalMap dummy; + swap( dummy ); +} + + +ValueInternalMap::BucketIndex +ValueInternalMap::size() const +{ + return itemCount_; +} + +bool +ValueInternalMap::reserveDelta( BucketIndex growth ) +{ + return reserve( itemCount_ + growth ); +} + +bool +ValueInternalMap::reserve( BucketIndex newItemCount ) +{ + if ( !buckets_ && newItemCount > 0 ) + { + buckets_ = mapAllocator()->allocateMapBuckets( 1 ); + bucketsSize_ = 1; + tailLink_ = &buckets_[0]; + } +// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink; + return true; +} + + +const Value * +ValueInternalMap::find( const char *key ) const +{ + if ( !bucketsSize_ ) + return 0; + HashKey hashedKey = hash( key ); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( const ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + current = current->next_ ) + { + for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return 0; + if ( strcmp( key, current->keys_[index] ) == 0 ) + return ¤t->items_[index]; + } + } + return 0; +} + + +Value * +ValueInternalMap::find( const char *key ) +{ + const ValueInternalMap *constThis = this; + return const_cast( constThis->find( key ) ); +} + + +Value & +ValueInternalMap::resolveReference( const char *key, + bool isStatic ) +{ + HashKey hashedKey = hash( key ); + if ( bucketsSize_ ) + { + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink **previous = 0; + BucketIndex index; + for ( ValueInternalLink *current = &buckets_[bucketIndex]; + current != 0; + previous = ¤t->next_, current = current->next_ ) + { + 
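+         // Probe the fixed-size slots of this link: an available slot means
+         // the key is not stored yet, so it can be created in place here.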
for ( index=0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( current->items_[index].isItemAvailable() ) + return setNewItem( key, isStatic, current, index ); + if ( strcmp( key, current->keys_[index] ) == 0 ) + return current->items_[index]; + } + } + } + + reserveDelta( 1 ); + return unsafeAdd( key, isStatic, hashedKey ); +} + + +void +ValueInternalMap::remove( const char *key ) +{ + HashKey hashedKey = hash( key ); + if ( !bucketsSize_ ) + return; + BucketIndex bucketIndex = hashedKey % bucketsSize_; + for ( ValueInternalLink *link = &buckets_[bucketIndex]; + link != 0; + link = link->next_ ) + { + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + return; + if ( strcmp( key, link->keys_[index] ) == 0 ) + { + doActualRemove( link, index, bucketIndex ); + return; + } + } + } +} + +void +ValueInternalMap::doActualRemove( ValueInternalLink *link, + BucketIndex index, + BucketIndex bucketIndex ) +{ + // find last item of the bucket and swap it with the 'removed' one. + // set removed items flags to 'available'. + // if last page only contains 'available' items, then desallocate it (it's empty) + ValueInternalLink *&lastLink = getLastLinkInBucket( index ); + BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1 + for ( ; + lastItemIndex < ValueInternalLink::itemPerLink; + ++lastItemIndex ) // may be optimized with dicotomic search + { + if ( lastLink->items_[lastItemIndex].isItemAvailable() ) + break; + } + + BucketIndex lastUsedIndex = lastItemIndex - 1; + Value *valueToDelete = &link->items_[index]; + Value *valueToPreserve = &lastLink->items_[lastUsedIndex]; + if ( valueToDelete != valueToPreserve ) + valueToDelete->swap( *valueToPreserve ); + if ( lastUsedIndex == 0 ) // page is now empty + { // remove it from bucket linked list and delete it. + ValueInternalLink *linkPreviousToLast = lastLink->previous_; + if ( linkPreviousToLast != 0 ) // can not deleted bucket link. + { + mapAllocator()->releaseMapLink( lastLink ); + linkPreviousToLast->next_ = 0; + lastLink = linkPreviousToLast; + } + } + else + { + Value dummy; + valueToPreserve->swap( dummy ); // restore deleted to default Value. + valueToPreserve->setItemUsed( false ); + } + --itemCount_; +} + + +ValueInternalLink *& +ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex ) +{ + if ( bucketIndex == bucketsSize_ - 1 ) + return tailLink_; + ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_; + if ( !previous ) + previous = &buckets_[bucketIndex]; + return previous; +} + + +Value & +ValueInternalMap::setNewItem( const char *key, + bool isStatic, + ValueInternalLink *link, + BucketIndex index ) +{ + char *duplicatedKey = makeMemberName( key ); + ++itemCount_; + link->keys_[index] = duplicatedKey; + link->items_[index].setItemUsed(); + link->items_[index].setMemberNameIsStatic( isStatic ); + return link->items_[index]; // items already default constructed. +} + + +Value & +ValueInternalMap::unsafeAdd( const char *key, + bool isStatic, + HashKey hashedKey ) +{ + JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." 
); + BucketIndex bucketIndex = hashedKey % bucketsSize_; + ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex ); + ValueInternalLink *link = previousLink; + BucketIndex index; + for ( index =0; index < ValueInternalLink::itemPerLink; ++index ) + { + if ( link->items_[index].isItemAvailable() ) + break; + } + if ( index == ValueInternalLink::itemPerLink ) // need to add a new page + { + ValueInternalLink *newLink = mapAllocator()->allocateMapLink(); + index = 0; + link->next_ = newLink; + previousLink = newLink; + link = newLink; + } + return setNewItem( key, isStatic, link, index ); +} + + +ValueInternalMap::HashKey +ValueInternalMap::hash( const char *key ) const +{ + HashKey hash = 0; + while ( *key ) + hash += *key++ * 37; + return hash; +} + + +int +ValueInternalMap::compare( const ValueInternalMap &other ) const +{ + int sizeDiff( itemCount_ - other.itemCount_ ); + if ( sizeDiff != 0 ) + return sizeDiff; + // Strict order guaranty is required. Compare all keys FIRST, then compare values. + IteratorState it; + IteratorState itEnd; + makeBeginIterator( it ); + makeEndIterator( itEnd ); + for ( ; !equals(it,itEnd); increment(it) ) + { + if ( !other.find( key( it ) ) ) + return 1; + } + + // All keys are equals, let's compare values + makeBeginIterator( it ); + for ( ; !equals(it,itEnd); increment(it) ) + { + const Value *otherValue = other.find( key( it ) ); + int valueDiff = value(it).compare( *otherValue ); + if ( valueDiff != 0 ) + return valueDiff; + } + return 0; +} + + +void +ValueInternalMap::makeBeginIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = 0; + it.itemIndex_ = 0; + it.link_ = buckets_; +} + + +void +ValueInternalMap::makeEndIterator( IteratorState &it ) const +{ + it.map_ = const_cast( this ); + it.bucketIndex_ = bucketsSize_; + it.itemIndex_ = 0; + it.link_ = 0; +} + + +bool +ValueInternalMap::equals( const IteratorState &x, const IteratorState &other ) +{ + return x.map_ == other.map_ + && x.bucketIndex_ == other.bucketIndex_ + && x.link_ == other.link_ + && x.itemIndex_ == other.itemIndex_; +} + + +void +ValueInternalMap::incrementBucket( IteratorState &iterator ) +{ + ++iterator.bucketIndex_; + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ ) + iterator.link_ = 0; + else + iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]); + iterator.itemIndex_ = 0; +} + + +void +ValueInternalMap::increment( IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." ); + ++iterator.itemIndex_; + if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink ) + { + JSON_ASSERT_MESSAGE( iterator.link_ != 0, + "ValueInternalMap::increment(): attempting to iterate beyond end." ); + iterator.link_ = iterator.link_->next_; + if ( iterator.link_ == 0 ) + incrementBucket( iterator ); + } + else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() ) + { + incrementBucket( iterator ); + } +} + + +void +ValueInternalMap::decrement( IteratorState &iterator ) +{ + if ( iterator.itemIndex_ == 0 ) + { + JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." ); + if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] ) + { + JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." 
); + --(iterator.bucketIndex_); + } + iterator.link_ = iterator.link_->previous_; + iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1; + } +} + + +const char * +ValueInternalMap::key( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->keys_[iterator.itemIndex_]; +} + +const char * +ValueInternalMap::key( const IteratorState &iterator, bool &isStatic ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic(); + return iterator.link_->keys_[iterator.itemIndex_]; +} + + +Value & +ValueInternalMap::value( const IteratorState &iterator ) +{ + JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." ); + return iterator.link_->items_[iterator.itemIndex_]; +} + + +int +ValueInternalMap::distance( const IteratorState &x, const IteratorState &y ) +{ + int offset = 0; + IteratorState it = x; + while ( !equals( it, y ) ) + increment( it ); + return offset; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp new file mode 100644 index 0000000..8bb0304 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_reader.cpp @@ -0,0 +1,880 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +# include +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_( true ) + , strictRoot_( false ) +{ +} + + +Features +Features::all() +{ + return Features(); +} + + +Features +Features::strictMode() +{ + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4; +} + +static inline bool +in( Reader::Char c, Reader::Char c1, Reader::Char c2, Reader::Char c3, Reader::Char c4, Reader::Char c5 ) +{ + return c == c1 || c == c2 || c == c3 || c == c4 || c == c5; +} + + +static bool +containsNewLine( Reader::Location begin, + Reader::Location end ) +{ + for ( ;begin < end; ++begin ) + if ( *begin == '\n' || *begin == '\r' ) + return true; + return false; +} + + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : features_( Features::all() ) +{ +} + + +Reader::Reader( const Features &features ) + : features_( features ) +{ +} + + +bool +Reader::parse( const std::string &document, + Value &root, + bool collectComments ) +{ + document_ = document; + const char *begin = document_.c_str(); + const char *end = begin + document_.length(); + return parse( begin, end, root, collectComments ); +} + + +bool +Reader::parse( std::istream& sin, + Value &root, + bool collectComments ) +{ + //std::istream_iterator begin(sin); + //std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. 
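As an illustration of how a caller typically drives the parse() overloads above, here is a minimal, self-contained sketch; the file name, the main() wrapper and the error handling are invented for the example and are not part of this patch:

// Illustrative caller of Json::Reader; not part of the patch.
#include <fstream>
#include <iostream>
#include <json/json.h>

int main()
{
   std::ifstream input( "example.json" );        // hypothetical input file
   Json::Value root;
   Json::Reader reader;                          // defaults to Features::all()
   if ( !reader.parse( input, root, true ) )     // true = collect comments
   {
      std::cerr << reader.getFormattedErrorMessages();
      return 1;
   }
   std::cout << root.toStyledString() << std::endl;
   return 0;
}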
+   std::string doc;
+   std::getline(sin, doc, (char)EOF);
+   return parse( doc, root, collectComments );
+}
+
+bool
+Reader::parse( const char *beginDoc, const char *endDoc,
+               Value &root,
+               bool collectComments )
+{
+   if ( !features_.allowComments_ )
+   {
+      collectComments = false;
+   }
+
+   begin_ = beginDoc;
+   end_ = endDoc;
+   collectComments_ = collectComments;
+   current_ = begin_;
+   lastValueEnd_ = 0;
+   lastValue_ = 0;
+   commentsBefore_ = "";
+   errors_.clear();
+   while ( !nodes_.empty() )
+      nodes_.pop();
+   nodes_.push( &root );
+
+   bool successful = readValue();
+   Token token;
+   skipCommentTokens( token );
+   if ( collectComments_ && !commentsBefore_.empty() )
+      root.setComment( commentsBefore_, commentAfter );
+   if ( features_.strictRoot_ )
+   {
+      if ( !root.isArray() && !root.isObject() )
+      {
+         // Set error location to start of doc, ideally should be first token found in doc
+         token.type_ = tokenError;
+         token.start_ = beginDoc;
+         token.end_ = endDoc;
+         addError( "A valid JSON document must be either an array or an object value.",
+                   token );
+         return false;
+      }
+   }
+   return successful;
+}
+
+
+bool
+Reader::readValue()
+{
+   Token token;
+   skipCommentTokens( token );
+   bool successful = true;
+
+   if ( collectComments_ && !commentsBefore_.empty() )
+   {
+      currentValue().setComment( commentsBefore_, commentBefore );
+      commentsBefore_ = "";
+   }
+
+
+   switch ( token.type_ )
+   {
+   case tokenObjectBegin:
+      successful = readObject( token );
+      break;
+   case tokenArrayBegin:
+      successful = readArray( token );
+      break;
+   case tokenNumber:
+      successful = decodeNumber( token );
+      break;
+   case tokenString:
+      successful = decodeString( token );
+      break;
+   case tokenTrue:
+      currentValue() = true;
+      break;
+   case tokenFalse:
+      currentValue() = false;
+      break;
+   case tokenNull:
+      currentValue() = Value();
+      break;
+   default:
+      return addError( "Syntax error: value, object or array expected.", token );
+   }
+
+   if ( collectComments_ )
+   {
+      lastValueEnd_ = current_;
+      lastValue_ = &currentValue();
+   }
+
+   return successful;
+}
+
+
+void
+Reader::skipCommentTokens( Token &token )
+{
+   if ( features_.allowComments_ )
+   {
+      do
+      {
+         readToken( token );
+      }
+      while ( token.type_ == tokenComment );
+   }
+   else
+   {
+      readToken( token );
+   }
+}
+
+
+bool
+Reader::expectToken( TokenType type, Token &token, const char *message )
+{
+   readToken( token );
+   if ( token.type_ != type )
+      return addError( message, token );
+   return true;
+}
+
+
+bool
+Reader::readToken( Token &token )
+{
+   skipSpaces();
+   token.start_ = current_;
+   Char c = getNextChar();
+   bool ok = true;
+   switch ( c )
+   {
+   case '{':
+      token.type_ = tokenObjectBegin;
+      break;
+   case '}':
+      token.type_ = tokenObjectEnd;
+      break;
+   case '[':
+      token.type_ = tokenArrayBegin;
+      break;
+   case ']':
+      token.type_ = tokenArrayEnd;
+      break;
+   case '"':
+      token.type_ = tokenString;
+      ok = readString();
+      break;
+   case '/':
+      token.type_ = tokenComment;
+      ok = readComment();
+      break;
+   case '0':
+   case '1':
+   case '2':
+   case '3':
+   case '4':
+   case '5':
+   case '6':
+   case '7':
+   case '8':
+   case '9':
+   case '-':
+      token.type_ = tokenNumber;
+      readNumber();
+      break;
+   case 't':
+      token.type_ = tokenTrue;
+      ok = match( "rue", 3 );
+      break;
+   case 'f':
+      token.type_ = tokenFalse;
+      ok = match( "alse", 4 );
+      break;
+   case 'n':
+      token.type_ = tokenNull;
+      ok = match( "ull", 3 );
+      break;
+   case ',':
+      token.type_ = tokenArraySeparator;
+      break;
+   case ':':
+      token.type_ = tokenMemberSeparator;
+      break;
+   case 0:
+      token.type_ = tokenEndOfStream;
+      break;
+   default:
+      ok =
false; + break; + } + if ( !ok ) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + + +void +Reader::skipSpaces() +{ + while ( current_ != end_ ) + { + Char c = *current_; + if ( c == ' ' || c == '\t' || c == '\r' || c == '\n' ) + ++current_; + else + break; + } +} + + +bool +Reader::match( Location pattern, + int patternLength ) +{ + if ( end_ - current_ < patternLength ) + return false; + int index = patternLength; + while ( index-- ) + if ( current_[index] != pattern[index] ) + return false; + current_ += patternLength; + return true; +} + + +bool +Reader::readComment() +{ + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if ( c == '*' ) + successful = readCStyleComment(); + else if ( c == '/' ) + successful = readCppStyleComment(); + if ( !successful ) + return false; + + if ( collectComments_ ) + { + CommentPlacement placement = commentBefore; + if ( lastValueEnd_ && !containsNewLine( lastValueEnd_, commentBegin ) ) + { + if ( c != '*' || !containsNewLine( commentBegin, current_ ) ) + placement = commentAfterOnSameLine; + } + + addComment( commentBegin, current_, placement ); + } + return true; +} + + +void +Reader::addComment( Location begin, + Location end, + CommentPlacement placement ) +{ + assert( collectComments_ ); + if ( placement == commentAfterOnSameLine ) + { + assert( lastValue_ != 0 ); + lastValue_->setComment( std::string( begin, end ), placement ); + } + else + { + if ( !commentsBefore_.empty() ) + commentsBefore_ += "\n"; + commentsBefore_ += std::string( begin, end ); + } +} + + +bool +Reader::readCStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '*' && *current_ == '/' ) + break; + } + return getNextChar() == '/'; +} + + +bool +Reader::readCppStyleComment() +{ + while ( current_ != end_ ) + { + Char c = getNextChar(); + if ( c == '\r' || c == '\n' ) + break; + } + return true; +} + + +void +Reader::readNumber() +{ + while ( current_ != end_ ) + { + if ( !(*current_ >= '0' && *current_ <= '9') && + !in( *current_, '.', 'e', 'E', '+', '-' ) ) + break; + ++current_; + } +} + +bool +Reader::readString() +{ + Char c = 0; + while ( current_ != end_ ) + { + c = getNextChar(); + if ( c == '\\' ) + getNextChar(); + else if ( c == '"' ) + break; + } + return c == '"'; +} + + +bool +Reader::readObject( Token &/*tokenStart*/ ) +{ + Token tokenName; + std::string name; + currentValue() = Value( objectValue ); + while ( readToken( tokenName ) ) + { + bool initialTokenOk = true; + while ( tokenName.type_ == tokenComment && initialTokenOk ) + initialTokenOk = readToken( tokenName ); + if ( !initialTokenOk ) + break; + if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object + return true; + if ( tokenName.type_ != tokenString ) + break; + + name = ""; + if ( !decodeString( tokenName, name ) ) + return recoverFromError( tokenObjectEnd ); + + Token colon; + if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator ) + { + return addErrorAndRecover( "Missing ':' after object member name", + colon, + tokenObjectEnd ); + } + Value &value = currentValue()[ name ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenObjectEnd ); + + Token comma; + if ( !readToken( comma ) + || ( comma.type_ != tokenObjectEnd && + comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment ) ) + { + return addErrorAndRecover( "Missing ',' or '}' in object declaration", + comma, + tokenObjectEnd 
); + } + bool finalizeTokenOk = true; + while ( comma.type_ == tokenComment && + finalizeTokenOk ) + finalizeTokenOk = readToken( comma ); + if ( comma.type_ == tokenObjectEnd ) + return true; + } + return addErrorAndRecover( "Missing '}' or object member name", + tokenName, + tokenObjectEnd ); +} + + +bool +Reader::readArray( Token &/*tokenStart*/ ) +{ + currentValue() = Value( arrayValue ); + skipSpaces(); + if ( *current_ == ']' ) // empty array + { + Token endArray; + readToken( endArray ); + return true; + } + int index = 0; + for (;;) + { + Value &value = currentValue()[ index++ ]; + nodes_.push( &value ); + bool ok = readValue(); + nodes_.pop(); + if ( !ok ) // error already set + return recoverFromError( tokenArrayEnd ); + + Token token; + // Accept Comment after last item in the array. + ok = readToken( token ); + while ( token.type_ == tokenComment && ok ) + { + ok = readToken( token ); + } + bool badTokenType = ( token.type_ != tokenArraySeparator && + token.type_ != tokenArrayEnd ); + if ( !ok || badTokenType ) + { + return addErrorAndRecover( "Missing ',' or ']' in array declaration", + token, + tokenArrayEnd ); + } + if ( token.type_ == tokenArrayEnd ) + break; + } + return true; +} + + +bool +Reader::decodeNumber( Token &token ) +{ + bool isDouble = false; + for ( Location inspect = token.start_; inspect != token.end_; ++inspect ) + { + isDouble = isDouble + || in( *inspect, '.', 'e', 'E', '+' ) + || ( *inspect == '-' && inspect != token.start_ ); + } + if ( isDouble ) + return decodeDouble( token ); + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if ( isNegative ) + ++current; + Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); + assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); + Value::LargestUInt value = 0; + while ( current < token.end_ ) + { + Char c = *current++; + if ( c < '0' || c > '9' ) + return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token ); + Value::UInt digit(c - '0'); + if ( value >= threshold ) + { + // If the current digit is not the last one, or if it is + // greater than the last digit of the maximum integer value, + // the parse the number as a double. 
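The comment above describes the guard decodeNumber() relies on: by precomputing maxIntegerValue / 10 and its last digit, the loop can tell before multiplying that appending one more digit would overflow, and fall back to decodeDouble(). A stand-alone sketch of the same idea, with the helper name and the tiny limit invented purely for illustration (not part of the patch):

// Stand-alone sketch of the pre-multiplication overflow guard; illustrative only.
#include <cassert>

typedef unsigned long long UInt64;

// Returns false when value * 10 + digit would exceed maxValue.
static bool appendDigit( UInt64 &value, unsigned digit, UInt64 maxValue )
{
   const UInt64 threshold = maxValue / 10;
   const unsigned lastDigit = unsigned( maxValue % 10 );
   if ( value > threshold || ( value == threshold && digit > lastDigit ) )
      return false;
   value = value * 10 + digit;
   return true;
}

int main()
{
   UInt64 value = 0;
   assert( appendDigit( value, 1, 18 ) );    // 1 fits below the limit of 18
   assert( appendDigit( value, 7, 18 ) );    // 17 still fits
   assert( !appendDigit( value, 9, 18 ) );   // 179 would exceed 18
   return 0;
}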
+         if ( current != token.end_ || digit > lastDigitThreshold )
+         {
+            return decodeDouble( token );
+         }
+      }
+      value = value * 10 + digit;
+   }
+   if ( isNegative )
+      currentValue() = -Value::LargestInt( value );
+   else if ( value <= Value::LargestUInt(Value::maxInt) )
+      currentValue() = Value::LargestInt( value );
+   else
+      currentValue() = value;
+   return true;
+}
+
+
+bool
+Reader::decodeDouble( Token &token )
+{
+   double value = 0;
+   const int bufferSize = 32;
+   int count;
+   int length = int(token.end_ - token.start_);
+   if ( length <= bufferSize )
+   {
+      Char buffer[bufferSize+1];
+      memcpy( buffer, token.start_, length );
+      buffer[length] = 0;
+      count = sscanf( buffer, "%lf", &value );
+   }
+   else
+   {
+      std::string buffer( token.start_, token.end_ );
+      count = sscanf( buffer.c_str(), "%lf", &value );
+   }
+
+   if ( count != 1 )
+      return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token );
+   currentValue() = value;
+   return true;
+}
+
+
+bool
+Reader::decodeString( Token &token )
+{
+   std::string decoded;
+   if ( !decodeString( token, decoded ) )
+      return false;
+   currentValue() = decoded;
+   return true;
+}
+
+
+bool
+Reader::decodeString( Token &token, std::string &decoded )
+{
+   decoded.reserve( token.end_ - token.start_ - 2 );
+   Location current = token.start_ + 1; // skip '"'
+   Location end = token.end_ - 1;       // do not include '"'
+   while ( current != end )
+   {
+      Char c = *current++;
+      if ( c == '"' )
+         break;
+      else if ( c == '\\' )
+      {
+         if ( current == end )
+            return addError( "Empty escape sequence in string", token, current );
+         Char escape = *current++;
+         switch ( escape )
+         {
+         case '"': decoded += '"'; break;
+         case '/': decoded += '/'; break;
+         case '\\': decoded += '\\'; break;
+         case 'b': decoded += '\b'; break;
+         case 'f': decoded += '\f'; break;
+         case 'n': decoded += '\n'; break;
+         case 'r': decoded += '\r'; break;
+         case 't': decoded += '\t'; break;
+         case 'u':
+            {
+               unsigned int unicode;
+               if ( !decodeUnicodeCodePoint( token, current, end, unicode ) )
+                  return false;
+               decoded += codePointToUTF8(unicode);
+            }
+            break;
+         default:
+            return addError( "Bad escape sequence in string", token, current );
+         }
+      }
+      else
+      {
+         decoded += c;
+      }
+   }
+   return true;
+}
+
+bool
+Reader::decodeUnicodeCodePoint( Token &token,
+                                Location &current,
+                                Location end,
+                                unsigned int &unicode )
+{
+
+   if ( !decodeUnicodeEscapeSequence( token, current, end, unicode ) )
+      return false;
+   if (unicode >= 0xD800 && unicode <= 0xDBFF)
+   {
+      // surrogate pairs
+      if (end - current < 6)
+         return addError( "additional six characters expected to parse unicode surrogate pair.", token, current );
+      unsigned int surrogatePair;
+      if (*(current++) == '\\' && *(current++)== 'u')
+      {
+         if (decodeUnicodeEscapeSequence( token, current, end, surrogatePair ))
+         {
+            unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
+         }
+         else
+            return false;
+      }
+      else
+         return addError( "expecting another \\u token to begin the second half of a unicode surrogate pair", token, current );
+   }
+   return true;
+}
+
+bool
+Reader::decodeUnicodeEscapeSequence( Token &token,
+                                     Location &current,
+                                     Location end,
+                                     unsigned int &unicode )
+{
+   if ( end - current < 4 )
+      return addError( "Bad unicode escape sequence in string: four digits expected.", token, current );
+   unicode = 0;
+   for ( int index =0; index < 4; ++index )
+   {
+      Char c = *current++;
+      unicode *= 16;
+      if ( c >= '0' && c <= '9' )
+         unicode += c - '0';
+      else if ( c >= 'a' && c <= 'f' )
+         unicode += c - 'a' + 10;
+      else if ( c >= 'A' &&
c <= 'F' ) + unicode += c - 'A' + 10; + else + return addError( "Bad unicode escape sequence in string: hexadecimal digit expected.", token, current ); + } + return true; +} + + +bool +Reader::addError( const std::string &message, + Token &token, + Location extra ) +{ + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back( info ); + return false; +} + + +bool +Reader::recoverFromError( TokenType skipUntilToken ) +{ + int errorCount = int(errors_.size()); + Token skip; + for (;;) + { + if ( !readToken(skip) ) + errors_.resize( errorCount ); // discard errors caused by recovery + if ( skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream ) + break; + } + errors_.resize( errorCount ); + return false; +} + + +bool +Reader::addErrorAndRecover( const std::string &message, + Token &token, + TokenType skipUntilToken ) +{ + addError( message, token ); + return recoverFromError( skipUntilToken ); +} + + +Value & +Reader::currentValue() +{ + return *(nodes_.top()); +} + + +Reader::Char +Reader::getNextChar() +{ + if ( current_ == end_ ) + return 0; + return *current_++; +} + + +void +Reader::getLocationLineAndColumn( Location location, + int &line, + int &column ) const +{ + Location current = begin_; + Location lastLineStart = current; + line = 0; + while ( current < location && current != end_ ) + { + Char c = *current++; + if ( c == '\r' ) + { + if ( *current == '\n' ) + ++current; + lastLineStart = current; + ++line; + } + else if ( c == '\n' ) + { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + + +std::string +Reader::getLocationLineAndColumn( Location location ) const +{ + int line, column; + getLocationLineAndColumn( location, line, column ); + char buffer[18+16+16+1]; + sprintf( buffer, "Line %d, Column %d", line, column ); + return buffer; +} + + +// Deprecated. Preserved for backward compatibility +std::string +Reader::getFormatedErrorMessages() const +{ + return getFormattedErrorMessages(); +} + + +std::string +Reader::getFormattedErrorMessages() const +{ + std::string formattedMessage; + for ( Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError ) + { + const ErrorInfo &error = *itError; + formattedMessage += "* " + getLocationLineAndColumn( error.token_.start_ ) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if ( error.extra_ ) + formattedMessage += "See " + getLocationLineAndColumn( error.extra_ ) + " for detail.\n"; + } + return formattedMessage; +} + + +std::istream& operator>>( std::istream &sin, Value &root ) +{ + Json::Reader reader; + bool ok = reader.parse(sin, root, true); + //JSON_ASSERT( ok ); + if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + return sin; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h new file mode 100644 index 0000000..658031b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_tool.h @@ -0,0 +1,93 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... 
+ * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string +codePointToUTF8(unsigned int cp) +{ + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) + { + result.resize(1); + result[0] = static_cast(cp); + } + else if (cp <= 0x7FF) + { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } + else if (cp <= 0xFFFF) + { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = 0x80 | static_cast((0x3f & (cp >> 6))); + result[0] = 0xE0 | static_cast((0xf & (cp >> 12))); + } + else if (cp <= 0x10FFFF) + { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + + +/// Returns true if ch is a control character (in range [0,32[). +static inline bool +isControlCharacter(char ch) +{ + return ch > 0 && ch <= 0x1F; +} + + +enum { + /// Constant that specify the size of the buffer that must be passed to uintToString. + uintToStringBufferSize = 3*sizeof(LargestUInt)+1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void +uintToString( LargestUInt value, + char *¤t ) +{ + *--current = 0; + do + { + *--current = char(value % 10) + '0'; + value /= 10; + } + while ( value != 0 ); +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp new file mode 100644 index 0000000..ff98f63 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_value.cpp @@ -0,0 +1,1829 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
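To make the surrogate-pair arithmetic in decodeUnicodeCodePoint() and the byte layout produced by codePointToUTF8() above concrete: the escape pair \uD834\uDD1E combines to U+1D11E, which UTF-8 encodes as F0 9D 84 9E. A small self-contained check that restates the same bit manipulation (illustrative only; it does not call the library):

// Illustrative check of the surrogate-pair and UTF-8 arithmetic above; not part of the patch.
#include <cassert>

int main()
{
   // High and low halves as decodeUnicodeEscapeSequence() would return them.
   unsigned int high = 0xD834;
   unsigned int low  = 0xDD1E;
   unsigned int cp = 0x10000 + ((high & 0x3FF) << 10) + (low & 0x3FF);
   assert( cp == 0x1D11E );

   // Four-byte UTF-8 sequence, matching the cp <= 0x10FFFF branch of codePointToUTF8().
   assert( (0xF0 | (0x7  & (cp >> 18))) == 0xF0 );
   assert( (0x80 | (0x3f & (cp >> 12))) == 0x9D );
   assert( (0x80 | (0x3f & (cp >> 6)))  == 0x84 );
   assert( (0x80 | (0x3f & cp))         == 0x9E );
   return 0;
}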
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +# include +# include +# ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +# include "json_batchallocator.h" +# endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +# include +#endif +#include // size_t + +#define JSON_ASSERT_UNREACHABLE assert( false ) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) JSON_FAIL_MESSAGE( message ) + +namespace Json { + +const Value Value::null; +const Int Value::minInt = Int( ~(UInt(-1)/2) ); +const Int Value::maxInt = Int( UInt(-1)/2 ); +const UInt Value::maxUInt = UInt(-1); +const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); +const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); +const UInt64 Value::maxUInt64 = UInt64(-1); +const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); +const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + + +/// Unknown size marker +static const unsigned int unknown = (unsigned)-1; + + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. + */ +static inline char * +duplicateStringValue( const char *value, + unsigned int length = unknown ) +{ + if ( length == unknown ) + length = (unsigned int)strlen(value); + char *newString = static_cast( malloc( length + 1 ) ); + JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); + memcpy( newString, value, length ); + newString[length] = 0; + return newString; +} + + +/** Free the string duplicated by duplicateStringValue(). + */ +static inline void +releaseStringValue( char *value ) +{ + if ( value ) + free( value ); +} + +} // namespace Json + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
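The limit constants defined above are derived from unsigned arithmetic alone: UInt(-1) is the all-ones bit pattern, halving it gives the largest signed value, and complementing that gives the smallest. On the usual two's-complement platforms this reproduces the <climits> values, as the following stand-alone check illustrates (assumes a 32-bit int; not part of the patch):

// Illustrative check of the limit derivation above; assumes two's-complement 32-bit int.
#include <cassert>
#include <climits>

int main()
{
   typedef unsigned int UInt;
   typedef int Int;
   const Int  minInt  = Int( ~(UInt(-1)/2) );   // 0x80000000
   const Int  maxInt  = Int( UInt(-1)/2 );      // 0x7FFFFFFF
   const UInt maxUInt = UInt(-1);               // 0xFFFFFFFF
   assert( minInt  == INT_MIN );
   assert( maxInt  == INT_MAX );
   assert( maxUInt == UINT_MAX );
   return 0;
}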
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGAMATION) +# ifdef JSON_VALUE_USE_INTERNAL_MAP +# include "json_internalarray.inl" +# include "json_internalmap.inl" +# endif // JSON_VALUE_USE_INTERNAL_MAP + +# include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +Value::CommentInfo::CommentInfo() + : comment_( 0 ) +{ +} + +Value::CommentInfo::~CommentInfo() +{ + if ( comment_ ) + releaseStringValue( comment_ ); +} + + +void +Value::CommentInfo::setComment( const char *text ) +{ + if ( comment_ ) + releaseStringValue( comment_ ); + JSON_ASSERT( text != 0 ); + JSON_ASSERT_MESSAGE( text[0]=='\0' || text[0]=='/', "Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue( text ); +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +# ifndef JSON_VALUE_USE_INTERNAL_MAP + +// Notes: index_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString( ArrayIndex index ) + : cstr_( 0 ) + , index_( index ) +{ +} + +Value::CZString::CZString( const char *cstr, DuplicationPolicy allocate ) + : cstr_( allocate == duplicate ? duplicateStringValue(cstr) + : cstr ) + , index_( allocate ) +{ +} + +Value::CZString::CZString( const CZString &other ) +: cstr_( other.index_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue( other.cstr_ ) + : other.cstr_ ) + , index_( other.cstr_ ? (other.index_ == noDuplication ? 
noDuplication : duplicate) + : other.index_ ) +{ +} + +Value::CZString::~CZString() +{ + if ( cstr_ && index_ == duplicate ) + releaseStringValue( const_cast( cstr_ ) ); +} + +void +Value::CZString::swap( CZString &other ) +{ + std::swap( cstr_, other.cstr_ ); + std::swap( index_, other.index_ ); +} + +Value::CZString & +Value::CZString::operator =( const CZString &other ) +{ + CZString temp( other ); + swap( temp ); + return *this; +} + +bool +Value::CZString::operator<( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) < 0; + return index_ < other.index_; +} + +bool +Value::CZString::operator==( const CZString &other ) const +{ + if ( cstr_ ) + return strcmp( cstr_, other.cstr_ ) == 0; + return index_ == other.index_; +} + + +ArrayIndex +Value::CZString::index() const +{ + return index_; +} + + +const char * +Value::CZString::c_str() const +{ + return cstr_; +} + +bool +Value::CZString::isStaticString() const +{ + return index_ == noDuplication; +} + +#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. + */ +Value::Value( ValueType type ) + : type_( type ) + , allocated_( 0 ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type ) + { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArray(); + break; + case objectValue: + value_.map_ = mapAllocator()->newMap(); + break; +#endif + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + + +#if defined(JSON_HAS_INT64) +Value::Value( UInt value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( Int value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + +#endif // if defined(JSON_HAS_INT64) + + +Value::Value( Int64 value ) + : type_( intValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.int_ = value; +} + + +Value::Value( UInt64 value ) + : type_( uintValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.uint_ = value; +} + +Value::Value( double value ) + : type_( realValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.real_ = value; +} + +Value::Value( const char *value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = 
duplicateStringValue( value ); +} + + +Value::Value( const char *beginValue, + const char *endValue ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( beginValue, + (unsigned int)(endValue - beginValue) ); +} + + +Value::Value( const std::string &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value.c_str(), + (unsigned int)value.length() ); + +} + +Value::Value( const StaticString &value ) + : type_( stringValue ) + , allocated_( false ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = const_cast( value.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +Value::Value( const CppTL::ConstString &value ) + : type_( stringValue ) + , allocated_( true ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.string_ = duplicateStringValue( value, value.length() ); +} +# endif + +Value::Value( bool value ) + : type_( booleanValue ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + value_.bool_ = value; +} + + +Value::Value( const Value &other ) + : type_( other.type_ ) + , comments_( 0 ) +# ifdef JSON_VALUE_USE_INTERNAL_MAP + , itemIsUsed_( 0 ) +#endif +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if ( other.value_.string_ ) + { + value_.string_ = duplicateStringValue( other.value_.string_ ); + allocated_ = true; + } + else + value_.string_ = 0; + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues( *other.value_.map_ ); + break; +#else + case arrayValue: + value_.array_ = arrayAllocator()->newArrayCopy( *other.value_.array_ ); + break; + case objectValue: + value_.map_ = mapAllocator()->newMapCopy( *other.value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + if ( other.comments_ ) + { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for ( int comment =0; comment < numberOfCommentPlacement; ++comment ) + { + const CommentInfo &otherComment = other.comments_[comment]; + if ( otherComment.comment_ ) + comments_[comment].setComment( otherComment.comment_ ); + } + } +} + + +Value::~Value() +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if ( allocated_ ) + releaseStringValue( value_.string_ ); + break; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + delete value_.map_; + break; +#else + case arrayValue: + arrayAllocator()->destructArray( value_.array_ ); + break; + case objectValue: + mapAllocator()->destructMap( value_.map_ ); + break; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + + if ( comments_ ) + delete[] comments_; +} + +Value & +Value::operator=( const Value &other ) +{ + Value temp( other ); + swap( temp ); + return *this; +} + +void +Value::swap( Value &other ) +{ + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap( value_, other.value_ ); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2; +} + +ValueType +Value::type() const +{ + return type_; +} + + +int +Value::compare( const 
Value &other ) const +{ + if ( *this < other ) + return -1; + if ( *this > other ) + return 1; + return 0; +} + + +bool +Value::operator <( const Value &other ) const +{ + int typeDelta = type_ - other.type_; + if ( typeDelta ) + return typeDelta < 0 ? true : false; + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + return ( value_.string_ == 0 && other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) < 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + { + int delta = int( value_.map_->size() - other.value_.map_->size() ); + if ( delta ) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) < 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) < 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool +Value::operator <=( const Value &other ) const +{ + return !(other < *this); +} + +bool +Value::operator >=( const Value &other ) const +{ + return !(*this < other); +} + +bool +Value::operator >( const Value &other ) const +{ + return other < *this; +} + +bool +Value::operator ==( const Value &other ) const +{ + //if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. + int temp = other.type_; + if ( type_ != temp ) + return false; + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + return ( value_.string_ == other.value_.string_ ) + || ( other.value_.string_ + && value_.string_ + && strcmp( value_.string_, other.value_.string_ ) == 0 ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() + && (*value_.map_) == (*other.value_.map_); +#else + case arrayValue: + return value_.array_->compare( *(other.value_.array_) ) == 0; + case objectValue: + return value_.map_->compare( *(other.value_.map_) ) == 0; +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool +Value::operator !=( const Value &other ) const +{ + return !( *this == other ); +} + +const char * +Value::asCString() const +{ + JSON_ASSERT( type_ == stringValue ); + return value_.string_; +} + + +std::string +Value::asString() const +{ + switch ( type_ ) + { + case nullValue: + return ""; + case stringValue: + return value_.string_ ? value_.string_ : ""; + case booleanValue: + return value_.bool_ ? 
"true" : "false"; + case intValue: + case uintValue: + case realValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to string" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return ""; // unreachable +} + +# ifdef JSON_USE_CPPTL +CppTL::ConstString +Value::asConstString() const +{ + return CppTL::ConstString( asString().c_str() ); +} +# endif + + +Value::Int +Value::asInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to int" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt +Value::asUInt() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); + JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to uint" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +# if defined(JSON_HAS_INT64) + +Value::Int64 +Value::asInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + return value_.int_; + case uintValue: + JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); + return Int( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to Int64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +Value::UInt64 +Value::asUInt64() const +{ + switch ( type_ ) + { + case nullValue: + return 0; + case intValue: + JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); + return value_.int_; + case uintValue: + return value_.uint_; + case realValue: + JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); + return UInt( value_.real_ ); + case booleanValue: + return value_.bool_ ? 
1 : 0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to UInt64" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} +# endif // if defined(JSON_HAS_INT64) + + +LargestInt +Value::asLargestInt() const +{ +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + + +LargestUInt +Value::asLargestUInt() const +{ +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + + +double +Value::asDouble() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to double" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + +float +Value::asFloat() const +{ + switch ( type_ ) + { + case nullValue: + return 0.0f; + case intValue: + return static_cast( value_.int_ ); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( value_.uint_ ); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast( value_.real_ ); + case booleanValue: + return value_.bool_ ? 1.0f : 0.0f; + case stringValue: + case arrayValue: + case objectValue: + JSON_FAIL_MESSAGE( "Type is not convertible to float" ); + default: + JSON_ASSERT_UNREACHABLE; + } + return 0.0f; // unreachable; +} + +bool +Value::asBool() const +{ + switch ( type_ ) + { + case nullValue: + return false; + case intValue: + case uintValue: + return value_.int_ != 0; + case realValue: + return value_.real_ != 0.0; + case booleanValue: + return value_.bool_; + case stringValue: + return value_.string_ && value_.string_[0] != 0; + case arrayValue: + case objectValue: + return value_.map_->size() != 0; + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +bool +Value::isConvertibleTo( ValueType other ) const +{ + switch ( type_ ) + { + case nullValue: + return true; + case intValue: + return ( other == nullValue && value_.int_ == 0 ) + || other == intValue + || ( other == uintValue && value_.int_ >= 0 ) + || other == realValue + || other == stringValue + || other == booleanValue; + case uintValue: + return ( other == nullValue && value_.uint_ == 0 ) + || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case realValue: + return ( other == nullValue && value_.real_ == 0.0 ) + || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) + || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) + || other == realValue + || other == stringValue + || other == booleanValue; + case booleanValue: + return ( other == nullValue && value_.bool_ == false ) + || other == intValue + || other == uintValue + || other == realValue + || other == stringValue + || other == booleanValue; + case stringValue: + return 
other == stringValue + || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + case arrayValue: + return other == arrayValue + || ( other == nullValue && value_.map_->size() == 0 ); + case objectValue: + return other == objectValue + || ( other == nullValue && value_.map_->size() == 0 ); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable; +} + + +/// Number of values in array or object +ArrayIndex +Value::size() const +{ + switch ( type_ ) + { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: // size of the array is highest index + 1 + if ( !value_.map_->empty() ) + { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index()+1; + } + return 0; + case objectValue: + return ArrayIndex( value_.map_->size() ); +#else + case arrayValue: + return Int( value_.array_->size() ); + case objectValue: + return Int( value_.map_->size() ); +#endif + default: + JSON_ASSERT_UNREACHABLE; + } + return 0; // unreachable; +} + + +bool +Value::empty() const +{ + if ( isNull() || isArray() || isObject() ) + return size() == 0u; + else + return false; +} + + +bool +Value::operator!() const +{ + return isNull(); +} + + +void +Value::clear() +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue || type_ == objectValue ); + + switch ( type_ ) + { +#ifndef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + case objectValue: + value_.map_->clear(); + break; +#else + case arrayValue: + value_.array_->clear(); + break; + case objectValue: + value_.map_->clear(); + break; +#endif + default: + break; + } +} + +void +Value::resize( ArrayIndex newSize ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ArrayIndex oldSize = size(); + if ( newSize == 0 ) + clear(); + else if ( newSize > oldSize ) + (*this)[ newSize - 1 ]; + else + { + for ( ArrayIndex index = newSize; index < oldSize; ++index ) + { + value_.map_->erase( index ); + } + assert( size() == newSize ); + } +#else + value_.array_->resize( newSize ); +#endif +} + + +Value & +Value::operator[]( ArrayIndex index ) +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + *this = Value( arrayValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::iterator it = value_.map_->lower_bound( key ); + if ( it != value_.map_->end() && (*it).first == key ) + return (*it).second; + + ObjectValues::value_type defaultValue( key, null ); + it = value_.map_->insert( it, defaultValue ); + return (*it).second; +#else + return value_.array_->resolveReference( index ); +#endif +} + + +Value & +Value::operator[]( int index ) +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +const Value & +Value::operator[]( ArrayIndex index ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == arrayValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString key( index ); + ObjectValues::const_iterator it = value_.map_->find( key ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + Value *value = value_.array_->find( index ); + return value ? 
*value : null; +#endif +} + + +const Value & +Value::operator[]( int index ) const +{ + JSON_ASSERT( index >= 0 ); + return (*this)[ ArrayIndex(index) ]; +} + + +Value & +Value::operator[]( const char *key ) +{ + return resolveReference( key, false ); +} + + +Value & +Value::resolveReference( const char *key, + bool isStatic ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + *this = Value( objectValue ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, isStatic ? CZString::noDuplication + : CZString::duplicateOnCopy ); + ObjectValues::iterator it = value_.map_->lower_bound( actualKey ); + if ( it != value_.map_->end() && (*it).first == actualKey ) + return (*it).second; + + ObjectValues::value_type defaultValue( actualKey, null ); + it = value_.map_->insert( it, defaultValue ); + Value &value = (*it).second; + return value; +#else + return value_.map_->resolveReference( key, isStatic ); +#endif +} + + +Value +Value::get( ArrayIndex index, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[index]); + return value == &null ? defaultValue : *value; +} + + +bool +Value::isValidIndex( ArrayIndex index ) const +{ + return index < size(); +} + + + +const Value & +Value::operator[]( const char *key ) const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::const_iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + return (*it).second; +#else + const Value *value = value_.map_->find( key ); + return value ? *value : null; +#endif +} + + +Value & +Value::operator[]( const std::string &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const std::string &key ) const +{ + return (*this)[ key.c_str() ]; +} + +Value & +Value::operator[]( const StaticString &key ) +{ + return resolveReference( key, true ); +} + + +# ifdef JSON_USE_CPPTL +Value & +Value::operator[]( const CppTL::ConstString &key ) +{ + return (*this)[ key.c_str() ]; +} + + +const Value & +Value::operator[]( const CppTL::ConstString &key ) const +{ + return (*this)[ key.c_str() ]; +} +# endif + + +Value & +Value::append( const Value &value ) +{ + return (*this)[size()] = value; +} + + +Value +Value::get( const char *key, + const Value &defaultValue ) const +{ + const Value *value = &((*this)[key]); + return value == &null ? 
defaultValue : *value; +} + + +Value +Value::get( const std::string &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} + +Value +Value::removeMember( const char* key ) +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return null; +#ifndef JSON_VALUE_USE_INTERNAL_MAP + CZString actualKey( key, CZString::noDuplication ); + ObjectValues::iterator it = value_.map_->find( actualKey ); + if ( it == value_.map_->end() ) + return null; + Value old(it->second); + value_.map_->erase(it); + return old; +#else + Value *value = value_.map_->find( key ); + if (value){ + Value old(*value); + value_.map_.remove( key ); + return old; + } else { + return null; + } +#endif +} + +Value +Value::removeMember( const std::string &key ) +{ + return removeMember( key.c_str() ); +} + +# ifdef JSON_USE_CPPTL +Value +Value::get( const CppTL::ConstString &key, + const Value &defaultValue ) const +{ + return get( key.c_str(), defaultValue ); +} +# endif + +bool +Value::isMember( const char *key ) const +{ + const Value *value = &((*this)[key]); + return value != &null; +} + + +bool +Value::isMember( const std::string &key ) const +{ + return isMember( key.c_str() ); +} + + +# ifdef JSON_USE_CPPTL +bool +Value::isMember( const CppTL::ConstString &key ) const +{ + return isMember( key.c_str() ); +} +#endif + +Value::Members +Value::getMemberNames() const +{ + JSON_ASSERT( type_ == nullValue || type_ == objectValue ); + if ( type_ == nullValue ) + return Value::Members(); + Members members; + members.reserve( value_.map_->size() ); +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for ( ; it != itEnd; ++it ) + members.push_back( std::string( (*it).first.c_str() ) ); +#else + ValueInternalMap::IteratorState it; + ValueInternalMap::IteratorState itEnd; + value_.map_->makeBeginIterator( it ); + value_.map_->makeEndIterator( itEnd ); + for ( ; !ValueInternalMap::equals( it, itEnd ); ValueInternalMap::increment(it) ) + members.push_back( std::string( ValueInternalMap::key( it ) ) ); +#endif + return members; +} +// +//# ifdef JSON_USE_CPPTL +//EnumMemberNames +//Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// } +// return EnumMemberNames(); +//} +// +// +//EnumValues +//Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# endif + + +bool +Value::isNull() const +{ + return type_ == nullValue; +} + + +bool +Value::isBool() const +{ + return type_ == booleanValue; +} + + +bool +Value::isInt() const +{ + return type_ == intValue; +} + + +bool +Value::isUInt() const +{ + return type_ == uintValue; +} + + +bool +Value::isIntegral() const +{ + return type_ == intValue + || type_ == uintValue + || type_ == booleanValue; +} + + +bool +Value::isDouble() const +{ + return type_ == realValue; +} + + +bool +Value::isNumeric() const +{ + return isIntegral() || isDouble(); +} + + +bool +Value::isString() const +{ + return type_ == stringValue; +} + + +bool +Value::isArray() const +{ + return type_ == nullValue || type_ == arrayValue; +} + + +bool +Value::isObject() const +{ + return type_ == nullValue || type_ == objectValue; +} + + +void 
+Value::setComment( const char *comment, + CommentPlacement placement ) +{ + if ( !comments_ ) + comments_ = new CommentInfo[numberOfCommentPlacement]; + comments_[placement].setComment( comment ); +} + + +void +Value::setComment( const std::string &comment, + CommentPlacement placement ) +{ + setComment( comment.c_str(), placement ); +} + + +bool +Value::hasComment( CommentPlacement placement ) const +{ + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string +Value::getComment( CommentPlacement placement ) const +{ + if ( hasComment(placement) ) + return comments_[placement].comment_; + return ""; +} + + +std::string +Value::toStyledString() const +{ + StyledWriter writer; + return writer.write( *this ); +} + + +Value::const_iterator +Value::begin() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + +Value::const_iterator +Value::end() const +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return const_iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return const_iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return const_iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return const_iterator(); +} + + +Value::iterator +Value::begin() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeBeginIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeBeginIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->begin() ); + break; +#endif + default: + break; + } + return iterator(); +} + +Value::iterator +Value::end() +{ + switch ( type_ ) + { +#ifdef JSON_VALUE_USE_INTERNAL_MAP + case arrayValue: + if ( value_.array_ ) + { + ValueInternalArray::IteratorState it; + value_.array_->makeEndIterator( it ); + return iterator( it ); + } + break; + case objectValue: + if ( value_.map_ ) + { + ValueInternalMap::IteratorState it; + value_.map_->makeEndIterator( it ); + return iterator( it ); + } + break; +#else + case arrayValue: + case objectValue: + if ( value_.map_ ) + return iterator( value_.map_->end() ); + break; +#endif + default: + break; + } + return iterator(); +} + + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() + : kind_( kindNone ) +{ +} + + +PathArgument::PathArgument( ArrayIndex index ) + : index_( index ) + , kind_( kindIndex ) +{ +} + + +PathArgument::PathArgument( const char *key ) + : key_( key ) + , kind_( kindKey ) +{ +} + 
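The begin()/end() overloads above are what make iterator-style traversal of arrays and objects work; it.key(), defined further down in json_valueiterator.inl, yields the member name or index. A minimal, illustrative traversal of an object (member names invented for the example; assumes lib_json is built and linked):

// Illustrative traversal using the iterators defined above; not part of the patch.
#include <iostream>
#include <json/json.h>

int main()
{
   Json::Value root( Json::objectValue );
   root["library"] = "jsoncpp";
   root["release"] = "0.6.0-rc2";
   for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
      std::cout << it.key().asString() << " = " << (*it).asString() << std::endl;
   return 0;
}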
+ +PathArgument::PathArgument( const std::string &key ) + : key_( key.c_str() ) + , kind_( kindKey ) +{ +} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path( const std::string &path, + const PathArgument &a1, + const PathArgument &a2, + const PathArgument &a3, + const PathArgument &a4, + const PathArgument &a5 ) +{ + InArgs in; + in.push_back( &a1 ); + in.push_back( &a2 ); + in.push_back( &a3 ); + in.push_back( &a4 ); + in.push_back( &a5 ); + makePath( path, in ); +} + + +void +Path::makePath( const std::string &path, + const InArgs &in ) +{ + const char *current = path.c_str(); + const char *end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while ( current != end ) + { + if ( *current == '[' ) + { + ++current; + if ( *current == '%' ) + addPathInArg( path, in, itInArg, PathArgument::kindIndex ); + else + { + ArrayIndex index = 0; + for ( ; current != end && *current >= '0' && *current <= '9'; ++current ) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back( index ); + } + if ( current == end || *current++ != ']' ) + invalidPath( path, int(current - path.c_str()) ); + } + else if ( *current == '%' ) + { + addPathInArg( path, in, itInArg, PathArgument::kindKey ); + ++current; + } + else if ( *current == '.' ) + { + ++current; + } + else + { + const char *beginName = current; + while ( current != end && !strchr( "[.", *current ) ) + ++current; + args_.push_back( std::string( beginName, current ) ); + } + } +} + + +void +Path::addPathInArg( const std::string &path, + const InArgs &in, + InArgs::const_iterator &itInArg, + PathArgument::Kind kind ) +{ + if ( itInArg == in.end() ) + { + // Error: missing argument %d + } + else if ( (*itInArg)->kind_ != kind ) + { + // Error: bad argument type + } + else + { + args_.push_back( **itInArg ); + } +} + + +void +Path::invalidPath( const std::string &path, + int location ) +{ + // Error: invalid path. +} + + +const Value & +Path::resolve( const Value &root ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + { + // Error: unable to resolve path (object has no member named '' at position...) 
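makePath() above implements a small path syntax: '.' separates object member names, '[n]' selects an array index, and '%' placeholders bind to the extra PathArgument parameters passed to the Path constructor. A minimal, illustrative use of the member-name form, resolved with the single-argument resolve() shown here (the member names are invented for the example; assumes lib_json is linked):

// Illustrative use of Json::Path with the syntax parsed by makePath() above; not part of the patch.
#include <iostream>
#include <json/json.h>

int main()
{
   Json::Value root;
   root["settings"]["indent"] = 3;

   Json::Path path( ".settings.indent" );
   std::cout << path.resolve( root ).asInt() << std::endl;   // prints 3
   return 0;
}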
+ } + } + } + return *node; +} + + +Value +Path::resolve( const Value &root, + const Value &defaultValue ) const +{ + const Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + return defaultValue; + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + return defaultValue; + node = &((*node)[arg.key_]); + if ( node == &Value::null ) + return defaultValue; + } + } + return *node; +} + + +Value & +Path::make( Value &root ) const +{ + Value *node = &root; + for ( Args::const_iterator it = args_.begin(); it != args_.end(); ++it ) + { + const PathArgument &arg = *it; + if ( arg.kind_ == PathArgument::kindIndex ) + { + if ( !node->isArray() ) + { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } + else if ( arg.kind_ == PathArgument::kindKey ) + { + if ( !node->isObject() ) + { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl new file mode 100644 index 0000000..7457ca3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_valueiterator.inl @@ -0,0 +1,299 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() +#ifndef JSON_VALUE_USE_INTERNAL_MAP + : current_() + , isNull_( true ) +{ +} +#else + : isArray_( true ) + , isNull_( true ) +{ + iterator_.array_ = ValueInternalArray::IteratorState(); +} +#endif + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator ¤t ) + : current_( current ) + , isNull_( false ) +{ +} +#else +ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state ) + : isArray_( true ) +{ + iterator_.array_ = state; +} + + +ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state ) + : isArray_( false ) +{ + iterator_.map_ = state; +} +#endif + +Value & +ValueIteratorBase::deref() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + return current_->second; +#else + if ( isArray_ ) + return ValueInternalArray::dereference( iterator_.array_ ); + return ValueInternalMap::value( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::increment() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + ++current_; +#else + if ( isArray_ ) + ValueInternalArray::increment( iterator_.array_ ); + ValueInternalMap::increment( iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::decrement() +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + --current_; +#else + if ( 
isArray_ ) + ValueInternalArray::decrement( iterator_.array_ ); + ValueInternalMap::decrement( iterator_.map_ ); +#endif +} + + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP +# ifdef JSON_USE_CPPTL_SMALLMAP + return current_ - other.current_; +# else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. + if ( isNull_ && other.isNull_ ) + { + return 0; + } + + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it ) + { + ++myDistance; + } + return myDistance; +# endif +#else + if ( isArray_ ) + return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +bool +ValueIteratorBase::isEqual( const SelfType &other ) const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + if ( isNull_ ) + { + return other.isNull_; + } + return current_ == other.current_; +#else + if ( isArray_ ) + return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ ); + return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ ); +#endif +} + + +void +ValueIteratorBase::copy( const SelfType &other ) +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + current_ = other.current_; +#else + if ( isArray_ ) + iterator_.array_ = other.iterator_.array_; + iterator_.map_ = other.iterator_.map_; +#endif +} + + +Value +ValueIteratorBase::key() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( czstring.c_str() ) + { + if ( czstring.isStaticString() ) + return Value( StaticString( czstring.c_str() ) ); + return Value( czstring.c_str() ); + } + return Value( czstring.index() ); +#else + if ( isArray_ ) + return Value( ValueInternalArray::indexOf( iterator_.array_ ) ); + bool isStatic; + const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic ); + if ( isStatic ) + return Value( StaticString( memberName ) ); + return Value( memberName ); +#endif +} + + +UInt +ValueIteratorBase::index() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const Value::CZString czstring = (*current_).first; + if ( !czstring.c_str() ) + return czstring.index(); + return Value::UInt( -1 ); +#else + if ( isArray_ ) + return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) ); + return Value::UInt( -1 ); +#endif +} + + +const char * +ValueIteratorBase::memberName() const +{ +#ifndef JSON_VALUE_USE_INTERNAL_MAP + const char *name = (*current_).first.c_str(); + return name ? 
name : ""; +#else + if ( !isArray_ ) + return ValueInternalMap::key( iterator_.map_ ); + return ""; +#endif +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueConstIterator & +ValueConstIterator::operator =( const ValueIteratorBase &other ) +{ + copy( other ); + return *this; +} + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() +{ +} + + +#ifndef JSON_VALUE_USE_INTERNAL_MAP +ValueIterator::ValueIterator( const Value::ObjectValues::iterator ¤t ) + : ValueIteratorBase( current ) +{ +} +#else +ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} + +ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state ) + : ValueIteratorBase( state ) +{ +} +#endif + +ValueIterator::ValueIterator( const ValueConstIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator::ValueIterator( const ValueIterator &other ) + : ValueIteratorBase( other ) +{ +} + +ValueIterator & +ValueIterator::operator =( const SelfType &other ) +{ + copy( other ); + return *this; +} + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp new file mode 100644 index 0000000..1bda183 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/json_writer.cpp @@ -0,0 +1,838 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +# include +# include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#include +#include + +#if _MSC_VER >= 1400 // VC++ 8.0 +#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
+#endif + +namespace Json { + +static bool containsControlCharacter( const char* str ) +{ + while ( *str ) + { + if ( isControlCharacter( *(str++) ) ) + return true; + } + return false; +} + + +std::string valueToString( LargestInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + bool isNegative = value < 0; + if ( isNegative ) + value = -value; + uintToString( LargestUInt(value), current ); + if ( isNegative ) + *--current = '-'; + assert( current >= buffer ); + return current; +} + + +std::string valueToString( LargestUInt value ) +{ + UIntToStringBuffer buffer; + char *current = buffer + sizeof(buffer); + uintToString( value, current ); + assert( current >= buffer ); + return current; +} + +#if defined(JSON_HAS_INT64) + +std::string valueToString( Int value ) +{ + return valueToString( LargestInt(value) ); +} + + +std::string valueToString( UInt value ) +{ + return valueToString( LargestUInt(value) ); +} + +#endif // # if defined(JSON_HAS_INT64) + + +std::string valueToString( double value ) +{ + char buffer[32]; +#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning. + sprintf_s(buffer, sizeof(buffer), "%#.16g", value); +#else + sprintf(buffer, "%#.16g", value); +#endif + char* ch = buffer + strlen(buffer) - 1; + if (*ch != '0') return buffer; // nothing to truncate, so save time + while(ch > buffer && *ch == '0'){ + --ch; + } + char* last_nonzero = ch; + while(ch >= buffer){ + switch(*ch){ + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + --ch; + continue; + case '.': + // Truncate zeroes to save bytes in output, but keep one. + *(last_nonzero+2) = '\0'; + return buffer; + default: + return buffer; + } + } + return buffer; +} + + +std::string valueToString( bool value ) +{ + return value ? "true" : "false"; +} + +std::string valueToQuotedString( const char *value ) +{ + // Not sure how to handle unicode... + if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) + return std::string("\"") + value + "\""; + // We have to walk value and escape any special characters. + // Appending to std::string is not efficient, but this should be rare. + // (Note: forward slashes are *not* rare, but I am not escaping them.) + std::string::size_type maxsize = strlen(value)*2 + 3; // allescaped+quotes+NULL + std::string result; + result.reserve(maxsize); // to avoid lots of mallocs + result += "\""; + for (const char* c=value; *c != 0; ++c) + { + switch(*c) + { + case '\"': + result += "\\\""; + break; + case '\\': + result += "\\\\"; + break; + case '\b': + result += "\\b"; + break; + case '\f': + result += "\\f"; + break; + case '\n': + result += "\\n"; + break; + case '\r': + result += "\\r"; + break; + case '\t': + result += "\\t"; + break; + //case '/': + // Even though \/ is considered a legal escape in JSON, a bare + // slash is also legal, so I see no reason to escape it. + // (I hope I am not misunderstanding something. 
+         // blep notes: actually escaping \/ may be useful in javascript to avoid </
+         // sequence.
+         // Should add a flag to allow this compatibility mode and prevent this
+         // sequence from occurring.
+      default:
+         if ( isControlCharacter( *c ) )
+         {
+            std::ostringstream oss;
+            oss << "\\u" << std::hex << std::uppercase << std::setfill('0') << std::setw(4) << static_cast<int>(*c);
+            result += oss.str();
+         }
+         else
+         {
+            result += *c;
+         }
+         break;
+      }
+   }
+   result += "\"";
+   return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer()
+{
+}
+
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+   : yamlCompatiblityEnabled_( false )
+{
+}
+
+
+void
+FastWriter::enableYAMLCompatibility()
+{
+   yamlCompatiblityEnabled_ = true;
+}
+
+
+std::string
+FastWriter::write( const Value &root )
+{
+   document_ = "";
+   writeValue( root );
+   document_ += "\n";
+   return document_;
+}
+
+
+void
+FastWriter::writeValue( const Value &value )
+{
+   switch ( value.type() )
+   {
+   case nullValue:
+      document_ += "null";
+      break;
+   case intValue:
+      document_ += valueToString( value.asLargestInt() );
+      break;
+   case uintValue:
+      document_ += valueToString( value.asLargestUInt() );
+      break;
+   case realValue:
+      document_ += valueToString( value.asDouble() );
+      break;
+   case stringValue:
+      document_ += valueToQuotedString( value.asCString() );
+      break;
+   case booleanValue:
+      document_ += valueToString( value.asBool() );
+      break;
+   case arrayValue:
+      {
+         document_ += "[";
+         int size = value.size();
+         for ( int index =0; index < size; ++index )
+         {
+            if ( index > 0 )
+               document_ += ",";
+            writeValue( value[index] );
+         }
+         document_ += "]";
+      }
+      break;
+   case objectValue:
+      {
+         Value::Members members( value.getMemberNames() );
+         document_ += "{";
+         for ( Value::Members::iterator it = members.begin();
+               it != members.end();
+               ++it )
+         {
+            const std::string &name = *it;
+            if ( it != members.begin() )
+               document_ += ",";
+            document_ += valueToQuotedString( name.c_str() );
+            document_ += yamlCompatiblityEnabled_ ?
": " + : ":"; + writeValue( value[name] ); + } + document_ += "}"; + } + break; + } +} + + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_( 74 ) + , indentSize_( 3 ) +{ +} + + +std::string +StyledWriter::write( const Value &root ) +{ + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + document_ += "\n"; + return document_; +} + + +void +StyledWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + document_ += " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + for (;;) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + document_ += ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + document_ += "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + + +bool +StyledWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += 
int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + document_ += value; +} + + +void +StyledWriter::writeIndent() +{ + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + + +void +StyledWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + document_ += value; +} + + +void +StyledWriter::indent() +{ + indentString_ += std::string( indentSize_, ' ' ); +} + + +void +StyledWriter::unindent() +{ + assert( int(indentString_.size()) >= indentSize_ ); + indentString_.resize( indentString_.size() - indentSize_ ); +} + + +void +StyledWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + document_ += normalizeEOL( root.getComment( commentBefore ) ); + document_ += "\n"; +} + + +void +StyledWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + document_ += " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + document_ += "\n"; + document_ += normalizeEOL( root.getComment( commentAfter ) ); + document_ += "\n"; + } +} + + +bool +StyledWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter( std::string indentation ) + : document_(NULL) + , rightMargin_( 74 ) + , indentation_( indentation ) +{ +} + + +void +StyledStreamWriter::write( std::ostream &out, const Value &root ) +{ + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue( root ); + writeValue( root ); + writeCommentAfterValueOnSameLine( root ); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. 
+} + + +void +StyledStreamWriter::writeValue( const Value &value ) +{ + switch ( value.type() ) + { + case nullValue: + pushValue( "null" ); + break; + case intValue: + pushValue( valueToString( value.asLargestInt() ) ); + break; + case uintValue: + pushValue( valueToString( value.asLargestUInt() ) ); + break; + case realValue: + pushValue( valueToString( value.asDouble() ) ); + break; + case stringValue: + pushValue( valueToQuotedString( value.asCString() ) ); + break; + case booleanValue: + pushValue( valueToString( value.asBool() ) ); + break; + case arrayValue: + writeArrayValue( value); + break; + case objectValue: + { + Value::Members members( value.getMemberNames() ); + if ( members.empty() ) + pushValue( "{}" ); + else + { + writeWithIndent( "{" ); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) + { + const std::string &name = *it; + const Value &childValue = value[name]; + writeCommentBeforeValue( childValue ); + writeWithIndent( valueToQuotedString( name.c_str() ) ); + *document_ << " : "; + writeValue( childValue ); + if ( ++it == members.end() ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "}" ); + } + } + break; + } +} + + +void +StyledStreamWriter::writeArrayValue( const Value &value ) +{ + unsigned size = value.size(); + if ( size == 0 ) + pushValue( "[]" ); + else + { + bool isArrayMultiLine = isMultineArray( value ); + if ( isArrayMultiLine ) + { + writeWithIndent( "[" ); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index =0; + for (;;) + { + const Value &childValue = value[index]; + writeCommentBeforeValue( childValue ); + if ( hasChildValue ) + writeWithIndent( childValues_[index] ); + else + { + writeIndent(); + writeValue( childValue ); + } + if ( ++index == size ) + { + writeCommentAfterValueOnSameLine( childValue ); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine( childValue ); + } + unindent(); + writeWithIndent( "]" ); + } + else // output on a single line + { + assert( childValues_.size() == size ); + *document_ << "[ "; + for ( unsigned index =0; index < size; ++index ) + { + if ( index > 0 ) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + + +bool +StyledStreamWriter::isMultineArray( const Value &value ) +{ + int size = value.size(); + bool isMultiLine = size*3 >= rightMargin_ ; + childValues_.clear(); + for ( int index =0; index < size && !isMultiLine; ++index ) + { + const Value &childValue = value[index]; + isMultiLine = isMultiLine || + ( (childValue.isArray() || childValue.isObject()) && + childValue.size() > 0 ); + } + if ( !isMultiLine ) // check if line length > max line length + { + childValues_.reserve( size ); + addChildValues_ = true; + int lineLength = 4 + (size-1)*2; // '[ ' + ', '*n + ' ]' + for ( int index =0; index < size && !isMultiLine; ++index ) + { + writeValue( value[index] ); + lineLength += int( childValues_[index].length() ); + isMultiLine = isMultiLine && hasCommentForValue( value[index] ); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + + +void +StyledStreamWriter::pushValue( const std::string &value ) +{ + if ( addChildValues_ ) + childValues_.push_back( value ); + else + *document_ << value; +} + + +void +StyledStreamWriter::writeIndent() +{ + /* + Some comments in this method would have been nice. 
;-) + + if ( !document_.empty() ) + { + char last = document_[document_.length()-1]; + if ( last == ' ' ) // already indented + return; + if ( last != '\n' ) // Comments may add new-line + *document_ << '\n'; + } + */ + *document_ << '\n' << indentString_; +} + + +void +StyledStreamWriter::writeWithIndent( const std::string &value ) +{ + writeIndent(); + *document_ << value; +} + + +void +StyledStreamWriter::indent() +{ + indentString_ += indentation_; +} + + +void +StyledStreamWriter::unindent() +{ + assert( indentString_.size() >= indentation_.size() ); + indentString_.resize( indentString_.size() - indentation_.size() ); +} + + +void +StyledStreamWriter::writeCommentBeforeValue( const Value &root ) +{ + if ( !root.hasComment( commentBefore ) ) + return; + *document_ << normalizeEOL( root.getComment( commentBefore ) ); + *document_ << "\n"; +} + + +void +StyledStreamWriter::writeCommentAfterValueOnSameLine( const Value &root ) +{ + if ( root.hasComment( commentAfterOnSameLine ) ) + *document_ << " " + normalizeEOL( root.getComment( commentAfterOnSameLine ) ); + + if ( root.hasComment( commentAfter ) ) + { + *document_ << "\n"; + *document_ << normalizeEOL( root.getComment( commentAfter ) ); + *document_ << "\n"; + } +} + + +bool +StyledStreamWriter::hasCommentForValue( const Value &value ) +{ + return value.hasComment( commentBefore ) + || value.hasComment( commentAfterOnSameLine ) + || value.hasComment( commentAfter ); +} + + +std::string +StyledStreamWriter::normalizeEOL( const std::string &text ) +{ + std::string normalized; + normalized.reserve( text.length() ); + const char *begin = text.c_str(); + const char *end = begin + text.length(); + const char *current = begin; + while ( current != end ) + { + char c = *current++; + if ( c == '\r' ) // mac or dos EOL + { + if ( *current == '\n' ) // convert dos EOL + ++current; + normalized += '\n'; + } + else // handle unix EOL & other char + normalized += c; + } + return normalized; +} + + +std::ostream& operator<<( std::ostream &sout, const Value &root ) +{ + Json::StyledStreamWriter writer; + writer.write(sout, root); + return sout; +} + + +} // namespace Json diff --git a/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript b/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript new file mode 100644 index 0000000..6e7c6c8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/lib_json/sconscript @@ -0,0 +1,8 @@ +Import( 'env buildLibrary' ) + +buildLibrary( env, Split( """ + json_reader.cpp + json_value.cpp + json_writer.cpp + """ ), + 'json' ) diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp new file mode 100644 index 0000000..02e7b21 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.cpp @@ -0,0 +1,608 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC +#include "jsontest.h" +#include +#include + +#if defined(_MSC_VER) +// Used to install a report hook that prevent dialog on assertion and error. +# include +#endif // if defined(_MSC_VER) + +#if defined(_WIN32) +// Used to prevent dialog on memory fault. 
+// Limits headers included by Windows.h +# define WIN32_LEAN_AND_MEAN +# define NOSERVICE +# define NOMCX +# define NOIME +# define NOSOUND +# define NOCOMM +# define NORPC +# define NOGDI +# define NOUSER +# define NODRIVERS +# define NOLOGERROR +# define NOPROFILER +# define NOMEMMGR +# define NOLFILEIO +# define NOOPENFILE +# define NORESOURCE +# define NOATOM +# define NOLANGUAGE +# define NOLSTRING +# define NODBCS +# define NOKEYBOARDINFO +# define NOGDICAPMASKS +# define NOCOLOR +# define NOGDIOBJ +# define NODRAWTEXT +# define NOTEXTMETRIC +# define NOSCALABLEFONT +# define NOBITMAP +# define NORASTEROPS +# define NOMETAFILE +# define NOSYSMETRICS +# define NOSYSTEMPARAMSINFO +# define NOMSG +# define NOWINSTYLES +# define NOWINOFFSETS +# define NOSHOWWINDOW +# define NODEFERWINDOWPOS +# define NOVIRTUALKEYCODES +# define NOKEYSTATES +# define NOWH +# define NOMENUS +# define NOSCROLL +# define NOCLIPBOARD +# define NOICONS +# define NOMB +# define NOSYSCOMMANDS +# define NOMDI +# define NOCTLMGR +# define NOWINMESSAGES +# include +#endif // if defined(_WIN32) + +namespace JsonTest { + + +// class TestResult +// ////////////////////////////////////////////////////////////////// + +TestResult::TestResult() + : predicateId_( 1 ) + , lastUsedPredicateId_( 0 ) + , messageTarget_( 0 ) +{ + // The root predicate has id 0 + rootPredicateNode_.id_ = 0; + rootPredicateNode_.next_ = 0; + predicateStackTail_ = &rootPredicateNode_; +} + + +void +TestResult::setTestName( const std::string &name ) +{ + name_ = name; +} + +TestResult & +TestResult::addFailure( const char *file, unsigned int line, + const char *expr ) +{ + /// Walks the PredicateContext stack adding them to failures_ if not already added. + unsigned int nestingLevel = 0; + PredicateContext *lastNode = rootPredicateNode_.next_; + for ( ; lastNode != 0; lastNode = lastNode->next_ ) + { + if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext + { + lastUsedPredicateId_ = lastNode->id_; + addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_, + nestingLevel ); + // Link the PredicateContext to the failure for message target when + // popping the PredicateContext. 
+ lastNode->failure_ = &( failures_.back() ); + } + ++nestingLevel; + } + + // Adds the failed assertion + addFailureInfo( file, line, expr, nestingLevel ); + messageTarget_ = &( failures_.back() ); + return *this; +} + + +void +TestResult::addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ) +{ + Failure failure; + failure.file_ = file; + failure.line_ = line; + if ( expr ) + { + failure.expr_ = expr; + } + failure.nestingLevel_ = nestingLevel; + failures_.push_back( failure ); +} + + +TestResult & +TestResult::popPredicateContext() +{ + PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 ) + { + lastNode = lastNode->next_; + } + // Set message target to popped failure + PredicateContext *tail = lastNode->next_; + if ( tail != 0 && tail->failure_ != 0 ) + { + messageTarget_ = tail->failure_; + } + // Remove tail from list + predicateStackTail_ = lastNode; + lastNode->next_ = 0; + return *this; +} + + +bool +TestResult::failed() const +{ + return !failures_.empty(); +} + + +unsigned int +TestResult::getAssertionNestingLevel() const +{ + unsigned int level = 0; + const PredicateContext *lastNode = &rootPredicateNode_; + while ( lastNode->next_ != 0 ) + { + lastNode = lastNode->next_; + ++level; + } + return level; +} + + +void +TestResult::printFailure( bool printTestName ) const +{ + if ( failures_.empty() ) + { + return; + } + + if ( printTestName ) + { + printf( "* Detail of %s test failure:\n", name_.c_str() ); + } + + // Print in reverse to display the callstack in the right order + Failures::const_iterator itEnd = failures_.end(); + for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it ) + { + const Failure &failure = *it; + std::string indent( failure.nestingLevel_ * 2, ' ' ); + if ( failure.file_ ) + { + printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ ); + } + if ( !failure.expr_.empty() ) + { + printf( "%s\n", failure.expr_.c_str() ); + } + else if ( failure.file_ ) + { + printf( "\n" ); + } + if ( !failure.message_.empty() ) + { + std::string reindented = indentText( failure.message_, indent + " " ); + printf( "%s\n", reindented.c_str() ); + } + } +} + + +std::string +TestResult::indentText( const std::string &text, + const std::string &indent ) +{ + std::string reindented; + std::string::size_type lastIndex = 0; + while ( lastIndex < text.size() ) + { + std::string::size_type nextIndex = text.find( '\n', lastIndex ); + if ( nextIndex == std::string::npos ) + { + nextIndex = text.size() - 1; + } + reindented += indent; + reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 ); + lastIndex = nextIndex + 1; + } + return reindented; +} + + +TestResult & +TestResult::addToLastFailure( const std::string &message ) +{ + if ( messageTarget_ != 0 ) + { + messageTarget_->message_ += message; + } + return *this; +} + + +TestResult & +TestResult::operator << ( bool value ) +{ + return addToLastFailure( value ? 
"true" : "false" ); +} + + +TestResult & +TestResult::operator << ( int value ) +{ + char buffer[32]; + sprintf( buffer, "%d", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( unsigned int value ) +{ + char buffer[32]; + sprintf( buffer, "%u", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( double value ) +{ + char buffer[32]; + sprintf( buffer, "%16g", value ); + return addToLastFailure( buffer ); +} + + +TestResult & +TestResult::operator << ( const char *value ) +{ + return addToLastFailure( value ? value + : "" ); +} + + +TestResult & +TestResult::operator << ( const std::string &value ) +{ + return addToLastFailure( value ); +} + + + +// class TestCase +// ////////////////////////////////////////////////////////////////// + +TestCase::TestCase() + : result_( 0 ) +{ +} + + +TestCase::~TestCase() +{ +} + + +void +TestCase::run( TestResult &result ) +{ + result_ = &result; + runTestCase(); +} + + + +// class Runner +// ////////////////////////////////////////////////////////////////// + +Runner::Runner() +{ +} + + +Runner & +Runner::add( TestCaseFactory factory ) +{ + tests_.push_back( factory ); + return *this; +} + + +unsigned int +Runner::testCount() const +{ + return static_cast( tests_.size() ); +} + + +std::string +Runner::testNameAt( unsigned int index ) const +{ + TestCase *test = tests_[index](); + std::string name = test->testName(); + delete test; + return name; +} + + +void +Runner::runTestAt( unsigned int index, TestResult &result ) const +{ + TestCase *test = tests_[index](); + result.setTestName( test->testName() ); + printf( "Testing %s: ", test->testName() ); + fflush( stdout ); +#if JSON_USE_EXCEPTION + try + { +#endif // if JSON_USE_EXCEPTION + test->run( result ); +#if JSON_USE_EXCEPTION + } + catch ( const std::exception &e ) + { + result.addFailure( __FILE__, __LINE__, + "Unexpected exception caugth:" ) << e.what(); + } +#endif // if JSON_USE_EXCEPTION + delete test; + const char *status = result.failed() ? 
"FAILED" + : "OK"; + printf( "%s\n", status ); + fflush( stdout ); +} + + +bool +Runner::runAllTest( bool printSummary ) const +{ + unsigned int count = testCount(); + std::deque failures; + for ( unsigned int index = 0; index < count; ++index ) + { + TestResult result; + runTestAt( index, result ); + if ( result.failed() ) + { + failures.push_back( result ); + } + } + + if ( failures.empty() ) + { + if ( printSummary ) + { + printf( "All %d tests passed\n", count ); + } + return true; + } + else + { + for ( unsigned int index = 0; index < failures.size(); ++index ) + { + TestResult &result = failures[index]; + result.printFailure( count > 1 ); + } + + if ( printSummary ) + { + unsigned int failedCount = static_cast( failures.size() ); + unsigned int passedCount = count - failedCount; + printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount ); + } + return false; + } +} + + +bool +Runner::testIndex( const std::string &testName, + unsigned int &indexOut ) const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + if ( testNameAt(index) == testName ) + { + indexOut = index; + return true; + } + } + return false; +} + + +void +Runner::listTests() const +{ + unsigned int count = testCount(); + for ( unsigned int index = 0; index < count; ++index ) + { + printf( "%s\n", testNameAt( index ).c_str() ); + } +} + + +int +Runner::runCommandLine( int argc, const char *argv[] ) const +{ + typedef std::deque TestNames; + Runner subrunner; + for ( int index = 1; index < argc; ++index ) + { + std::string opt = argv[index]; + if ( opt == "--list-tests" ) + { + listTests(); + return 0; + } + else if ( opt == "--test-auto" ) + { + preventDialogOnCrash(); + } + else if ( opt == "--test" ) + { + ++index; + if ( index < argc ) + { + unsigned int testNameIndex; + if ( testIndex( argv[index], testNameIndex ) ) + { + subrunner.add( tests_[testNameIndex] ); + } + else + { + fprintf( stderr, "Test '%s' does not exist!\n", argv[index] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + else + { + printUsage( argv[0] ); + return 2; + } + } + bool succeeded; + if ( subrunner.testCount() > 0 ) + { + succeeded = subrunner.runAllTest( subrunner.testCount() > 1 ); + } + else + { + succeeded = runAllTest( true ); + } + return succeeded ? 0 + : 1; +} + + +#if defined(_MSC_VER) +// Hook MSVCRT assertions to prevent dialog from appearing +static int +msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +{ + // The default CRT handling of error and assertion is to display + // an error dialog to the user. + // Instead, when an error or an assertion occurs, we force the + // application to terminate using abort() after display + // the message on stderr. + if ( reportType == _CRT_ERROR || + reportType == _CRT_ASSERT ) + { + // calling abort() cause the ReportHook to be called + // The following is used to detect this case and let's the + // error handler fallback on its default behaviour ( + // display a warning message) + static volatile bool isAborting = false; + if ( isAborting ) + { + return TRUE; + } + isAborting = true; + + fprintf( stderr, "CRT Error/Assert:\n%s\n", message ); + fflush( stderr ); + abort(); + } + // Let's other reportType (_CRT_WARNING) be handled as they would by default + return FALSE; +} +#endif // if defined(_MSC_VER) + + +void +Runner::preventDialogOnCrash() +{ +#if defined(_MSC_VER) + // Install a hook to prevent MSVCRT error and assertion from + // popping a dialog. 
+ _CrtSetReportHook( &msvcrtSilentReportHook ); +#endif // if defined(_MSC_VER) + + // @todo investiguate this handler (for buffer overflow) + // _set_security_error_handler + +#if defined(_WIN32) + // Prevents the system from popping a dialog for debugging if the + // application fails due to invalid memory access. + SetErrorMode( SEM_FAILCRITICALERRORS + | SEM_NOGPFAULTERRORBOX + | SEM_NOOPENFILEERRORBOX ); +#endif // if defined(_WIN32) +} + +void +Runner::printUsage( const char *appName ) +{ + printf( + "Usage: %s [options]\n" + "\n" + "If --test is not specified, then all the test cases be run.\n" + "\n" + "Valid options:\n" + "--list-tests: print the name of all test cases on the standard\n" + " output and exit.\n" + "--test TESTNAME: executes the test case with the specified name.\n" + " May be repeated.\n" + "--test-auto: prevent dialog prompting for debugging on crash.\n" + , appName ); +} + + + +// Assertion functions +// ////////////////////////////////////////////////////////////////// + +TestResult & +checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ) +{ + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: '" << expected << "'\n"; + result << "Actual : '" << actual << "'"; + } + return result; +} + + +} // namespace JsonTest diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h new file mode 100644 index 0000000..0d07238 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/jsontest.h @@ -0,0 +1,259 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSONTEST_H_INCLUDED +# define JSONTEST_H_INCLUDED + +# include +# include +# include +# include + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Mini Unit Testing framework +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + + +/** \brief Unit testing framework. + * \warning: all assertions are non-aborting, test case execution will continue + * even if an assertion namespace. + * This constraint is for portability: the framework needs to compile + * on Visual Studio 6 and must not require exception usage. + */ +namespace JsonTest { + + + class Failure + { + public: + const char *file_; + unsigned int line_; + std::string expr_; + std::string message_; + unsigned int nestingLevel_; + }; + + + /// Context used to create the assertion callstack on failure. + /// Must be a POD to allow inline initialisation without stepping + /// into the debugger. + struct PredicateContext + { + typedef unsigned int Id; + Id id_; + const char *file_; + unsigned int line_; + const char *expr_; + PredicateContext *next_; + /// Related Failure, set when the PredicateContext is converted + /// into a Failure. + Failure *failure_; + }; + + class TestResult + { + public: + TestResult(); + + /// \internal Implementation detail for assertion macros + /// Not encapsulated to prevent step into when debugging failed assertions + /// Incremented by one on assertion predicate entry, decreased by one + /// by addPredicateContext(). 
+ PredicateContext::Id predicateId_; + + /// \internal Implementation detail for predicate macros + PredicateContext *predicateStackTail_; + + void setTestName( const std::string &name ); + + /// Adds an assertion failure. + TestResult &addFailure( const char *file, unsigned int line, + const char *expr = 0 ); + + /// Removes the last PredicateContext added to the predicate stack + /// chained list. + /// Next messages will be targed at the PredicateContext that was removed. + TestResult &popPredicateContext(); + + bool failed() const; + + void printFailure( bool printTestName ) const; + + TestResult &operator << ( bool value ); + TestResult &operator << ( int value ); + TestResult &operator << ( unsigned int value ); + TestResult &operator << ( double value ); + TestResult &operator << ( const char *value ); + TestResult &operator << ( const std::string &value ); + + private: + TestResult &addToLastFailure( const std::string &message ); + unsigned int getAssertionNestingLevel() const; + /// Adds a failure or a predicate context + void addFailureInfo( const char *file, unsigned int line, + const char *expr, unsigned int nestingLevel ); + static std::string indentText( const std::string &text, + const std::string &indent ); + + typedef std::deque Failures; + Failures failures_; + std::string name_; + PredicateContext rootPredicateNode_; + PredicateContext::Id lastUsedPredicateId_; + /// Failure which is the target of the messages added using operator << + Failure *messageTarget_; + }; + + + class TestCase + { + public: + TestCase(); + + virtual ~TestCase(); + + void run( TestResult &result ); + + virtual const char *testName() const = 0; + + protected: + TestResult *result_; + + private: + virtual void runTestCase() = 0; + }; + + /// Function pointer type for TestCase factory + typedef TestCase *(*TestCaseFactory)(); + + class Runner + { + public: + Runner(); + + /// Adds a test to the suite + Runner &add( TestCaseFactory factory ); + + /// Runs test as specified on the command-line + /// If no command-line arguments are provided, run all tests. + /// If --list-tests is provided, then print the list of all test cases + /// If --test is provided, then run test testname. 
+ int runCommandLine( int argc, const char *argv[] ) const; + + /// Runs all the test cases + bool runAllTest( bool printSummary ) const; + + /// Returns the number of test case in the suite + unsigned int testCount() const; + + /// Returns the name of the test case at the specified index + std::string testNameAt( unsigned int index ) const; + + /// Runs the test case at the specified index using the specified TestResult + void runTestAt( unsigned int index, TestResult &result ) const; + + static void printUsage( const char *appName ); + + private: // prevents copy construction and assignment + Runner( const Runner &other ); + Runner &operator =( const Runner &other ); + + private: + void listTests() const; + bool testIndex( const std::string &testName, unsigned int &index ) const; + static void preventDialogOnCrash(); + + private: + typedef std::deque Factories; + Factories tests_; + }; + + template + TestResult & + checkEqual( TestResult &result, const T &expected, const T &actual, + const char *file, unsigned int line, const char *expr ) + { + if ( expected != actual ) + { + result.addFailure( file, line, expr ); + result << "Expected: " << expected << "\n"; + result << "Actual : " << actual; + } + return result; + } + + TestResult & + checkStringEqual( TestResult &result, + const std::string &expected, const std::string &actual, + const char *file, unsigned int line, const char *expr ); + +} // namespace JsonTest + + +/// \brief Asserts that the given expression is true. +/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y; +/// JSONTEST_ASSERT( x == y ); +#define JSONTEST_ASSERT( expr ) \ + if ( expr ) \ + { \ + } \ + else \ + result_->addFailure( __FILE__, __LINE__, #expr ) + +/// \brief Asserts that the given predicate is true. +/// The predicate may do other assertions and be a member function of the fixture. +#define JSONTEST_ASSERT_PRED( expr ) \ + { \ + JsonTest::PredicateContext _minitest_Context = { \ + result_->predicateId_, __FILE__, __LINE__, #expr }; \ + result_->predicateStackTail_->next_ = &_minitest_Context; \ + result_->predicateId_ += 1; \ + result_->predicateStackTail_ = &_minitest_Context; \ + (expr); \ + result_->popPredicateContext(); \ + } \ + *result_ + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_EQUAL( expected, actual ) \ + JsonTest::checkEqual( *result_, expected, actual, \ + __FILE__, __LINE__, \ + #expected " == " #actual ) + +/// \brief Asserts that two values are equals. +#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ + JsonTest::checkStringEqual( *result_, \ + std::string(expected), std::string(actual), \ + #expected " == " #actual ) + +/// \brief Begin a fixture test case. 
+#define JSONTEST_FIXTURE( FixtureType, name ) \ + class Test##FixtureType##name : public FixtureType \ + { \ + public: \ + static JsonTest::TestCase *factory() \ + { \ + return new Test##FixtureType##name(); \ + } \ + public: /* overidden from TestCase */ \ + virtual const char *testName() const \ + { \ + return #FixtureType "/" #name; \ + } \ + virtual void runTestCase(); \ + }; \ + \ + void Test##FixtureType##name::runTestCase() + +#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \ + &Test##FixtureType##name::factory + +#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \ + (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) ) + +#endif // ifndef JSONTEST_H_INCLUDED diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp new file mode 100644 index 0000000..3275219 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/main.cpp @@ -0,0 +1,430 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#include +#include "jsontest.h" + + +// TODO: +// - boolean value returns that they are integral. Should not be. +// - unsigned integer in integer range are not considered to be valid integer. Should check range. + + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// Json Library test cases +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + + +struct ValueTest : JsonTest::TestCase +{ + Json::Value null_; + Json::Value emptyArray_; + Json::Value emptyObject_; + Json::Value integer_; + Json::Value unsignedInteger_; + Json::Value smallUnsignedInteger_; + Json::Value real_; + Json::Value float_; + Json::Value array1_; + Json::Value object1_; + Json::Value emptyString_; + Json::Value string1_; + Json::Value string_; + Json::Value true_; + Json::Value false_; + + + ValueTest() + : emptyArray_( Json::arrayValue ) + , emptyObject_( Json::objectValue ) + , integer_( 123456789 ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) + , unsignedInteger_( 34567890u ) + , real_( 1234.56789 ) + , float_( 0.00390625f ) + , emptyString_( "" ) + , string1_( "a" ) + , string_( "sometext with space" ) + , true_( true ) + , false_( false ) + { + array1_.append( 1234 ); + object1_["id"] = 1234; + } + + struct IsCheck + { + /// Initialize all checks to \c false by default. 
+ IsCheck(); + + bool isObject_; + bool isArray_; + bool isBool_; + bool isDouble_; + bool isInt_; + bool isUInt_; + bool isIntegral_; + bool isNumeric_; + bool isString_; + bool isNull_; + }; + + void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); + + void checkMemberCount( Json::Value &value, unsigned int expectedCount ); + + void checkIs( const Json::Value &value, const IsCheck &check ); + + void checkIsLess( const Json::Value &x, const Json::Value &y ); + + void checkIsEqual( const Json::Value &x, const Json::Value &y ); +}; + + +JSONTEST_FIXTURE( ValueTest, size ) +{ + JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); + JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) ); +} + + +JSONTEST_FIXTURE( ValueTest, isObject ) +{ + IsCheck checks; + checks.isObject_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isArray ) +{ + IsCheck checks; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isNull ) +{ + IsCheck checks; + checks.isNull_ = true; + checks.isObject_ = true; + checks.isArray_ = true; + JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isString ) +{ + IsCheck checks; + checks.isString_ = true; + JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isBool ) +{ + IsCheck checks; + checks.isBool_ = true; + checks.isIntegral_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isDouble ) +{ + IsCheck checks; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isInt ) +{ + IsCheck checks; + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, isUInt ) +{ + IsCheck checks; + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); +} + + +JSONTEST_FIXTURE( ValueTest, accessArray ) +{ + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; +} + + +JSONTEST_FIXTURE( ValueTest, asFloat ) +{ + 
JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; +} + +void +ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) +{ + unsigned int count = 0; + Json::Value::const_iterator itEnd = value.end(); + for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator"; +} + +void +ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) +{ + JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + + unsigned int count = 0; + Json::Value::iterator itEnd = value.end(); + for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it ) + { + ++count; + } + JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator"; + + JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) ); +} + + +ValueTest::IsCheck::IsCheck() + : isObject_( false ) + , isArray_( false ) + , isBool_( false ) + , isDouble_( false ) + , isInt_( false ) + , isUInt_( false ) + , isIntegral_( false ) + , isNumeric_( false ) + , isString_( false ) + , isNull_( false ) +{ +} + + +void +ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) +{ + JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); +} + + +JSONTEST_FIXTURE( ValueTest, compareNull ) +{ + JSONTEST_ASSERT_PRED( checkIsEqual( Json::Value(), Json::Value() ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareInt ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0, 10 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10, 10 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( -10, -10 ) ); + JSONTEST_ASSERT_PRED( checkIsLess( -10, 0 ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareUInt ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0u, 10u ) ); + JSONTEST_ASSERT_PRED( checkIsLess( 0u, Json::Value::maxUInt ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10u, 10u ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareDouble ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( 0.0, 10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( 10.0, 10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( -10.0, -10.0 ) ); + JSONTEST_ASSERT_PRED( checkIsLess( -10.0, 0.0 ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareString ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( "", " " ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "", "a" ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "abcd", "zyui" ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "abc", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( "abcd", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( " ", " " ) ); + JSONTEST_ASSERT_PRED( checkIsLess( "ABCD", "abcd" ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( "ABCD", "ABCD" ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareBoolean ) +{ + JSONTEST_ASSERT_PRED( checkIsLess( false, true ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( false, false ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( true, true ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareArray ) +{ + // array compare size then content + Json::Value 
emptyArray(Json::arrayValue); + Json::Value l1aArray; + l1aArray.append( 0 ); + Json::Value l1bArray; + l1bArray.append( 10 ); + Json::Value l2aArray; + l2aArray.append( 0 ); + l2aArray.append( 0 ); + Json::Value l2bArray; + l2bArray.append( 0 ); + l2bArray.append( 10 ); + JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l1aArray ) ); + JSONTEST_ASSERT_PRED( checkIsLess( emptyArray, l2aArray ) ); + JSONTEST_ASSERT_PRED( checkIsLess( l1aArray, l2aArray ) ); + JSONTEST_ASSERT_PRED( checkIsLess( l2aArray, l2bArray ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( emptyArray, Json::Value( emptyArray ) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l1aArray, Json::Value( l1aArray) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l2bArray, Json::Value( l2bArray) ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareObject ) +{ + // object compare size then content + Json::Value emptyObject(Json::objectValue); + Json::Value l1aObject; + l1aObject["key1"] = 0; + Json::Value l1bObject; + l1bObject["key1"] = 10; + Json::Value l2aObject; + l2aObject["key1"] = 0; + l2aObject["key2"] = 0; + JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l1aObject ) ); + JSONTEST_ASSERT_PRED( checkIsLess( emptyObject, l2aObject ) ); + JSONTEST_ASSERT_PRED( checkIsLess( l1aObject, l2aObject ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( emptyObject, Json::Value( emptyObject ) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l1aObject, Json::Value( l1aObject ) ) ); + JSONTEST_ASSERT_PRED( checkIsEqual( l2aObject, Json::Value( l2aObject ) ) ); +} + + +JSONTEST_FIXTURE( ValueTest, compareType ) +{ + // object of different type are ordered according to their type + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(), Json::Value(1) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1), Json::Value(1u) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1u), Json::Value(1.0) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(1.0), Json::Value("a") ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value("a"), Json::Value(true) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(true), Json::Value(Json::arrayValue) ) ); + JSONTEST_ASSERT_PRED( checkIsLess( Json::Value(Json::arrayValue), Json::Value(Json::objectValue) ) ); +} + + +void +ValueTest::checkIsLess( const Json::Value &x, const Json::Value &y ) +{ + JSONTEST_ASSERT( x < y ); + JSONTEST_ASSERT( y > x ); + JSONTEST_ASSERT( x <= y ); + JSONTEST_ASSERT( y >= x ); + JSONTEST_ASSERT( !(x == y) ); + JSONTEST_ASSERT( !(y == x) ); + JSONTEST_ASSERT( !(x >= y) ); + JSONTEST_ASSERT( !(y <= x) ); + JSONTEST_ASSERT( !(x > y) ); + JSONTEST_ASSERT( !(y < x) ); + JSONTEST_ASSERT( x.compare( y ) < 0 ); + JSONTEST_ASSERT( y.compare( x ) >= 0 ); +} + + +void +ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) +{ + JSONTEST_ASSERT( x == y ); + JSONTEST_ASSERT( y == x ); + JSONTEST_ASSERT( x <= y ); + JSONTEST_ASSERT( y <= x ); + JSONTEST_ASSERT( x >= y ); + JSONTEST_ASSERT( y >= x ); + JSONTEST_ASSERT( !(x < y) ); + JSONTEST_ASSERT( !(y < x) ); + JSONTEST_ASSERT( !(x > y) ); + JSONTEST_ASSERT( !(y > x) ); + JSONTEST_ASSERT( x.compare( y ) == 0 ); + JSONTEST_ASSERT( y.compare( x ) == 0 ); +} + + +int main( int argc, const char *argv[] ) +{ + JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, 
isUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareUInt ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareDouble ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareString ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareBoolean ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareArray ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareObject ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareType ); + return runner.runCommandLine( argc, argv ); +} diff --git a/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript new file mode 100644 index 0000000..915fd01 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/src/test_lib_json/sconscript @@ -0,0 +1,10 @@ +Import( 'env_testing buildUnitTests' ) + +buildUnitTests( env_testing, Split( """ + main.cpp + jsontest.cpp + """ ), + 'test_lib_json' ) + +# For 'check' to work, 'libs' must be built first. +env_testing.Depends('test_lib_json', '#libs') diff --git a/tags/jsoncpp/0.6.0-rc2/test/cleantests.py b/tags/jsoncpp/0.6.0-rc2/test/cleantests.py new file mode 100644 index 0000000..c38fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/cleantests.py @@ -0,0 +1,10 @@ +# removes all files created during testing +import glob +import os + +paths = [] +for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: + paths += glob.glob( 'data/' + pattern ) + +for path in paths: + os.unlink( path ) diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json new file mode 100644 index 0000000..900fcc2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/fail_test_array_01.json @@ -0,0 +1 @@ +[ 1 2 3] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected new file mode 100644 index 0000000..a341ff7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.expected @@ -0,0 +1 @@ +.=[] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json new file mode 100644 index 0000000..fe51488 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_01.json @@ -0,0 +1 @@ +[] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected new file mode 100644 index 0000000..ef1f262 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.expected @@ -0,0 +1,2 @@ +.=[] +.[0]=1 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json new file mode 100644 index 0000000..7660873 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_02.json @@ -0,0 +1 @@ +[1] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected new file mode 100644 index 0000000..3d8dc18 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.expected @@ -0,0 +1,6 @@ +.=[] 
+.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json new file mode 100644 index 0000000..9b3f924 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_03.json @@ -0,0 +1 @@ +[ 1, 2 , 3,4,5] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected new file mode 100644 index 0000000..ad4add9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.expected @@ -0,0 +1,5 @@ +.=[] +.[0]=1 +.[1]="abc" +.[2]=12.3 +.[3]=-4 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json new file mode 100644 index 0000000..ecca546 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_04.json @@ -0,0 +1 @@ +[1, "abc" , 12.3, -4] diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected new file mode 100644 index 0000000..76cff87 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.expected @@ -0,0 +1,100 @@ +.=[] +.[0]=1 +.[1]=2 +.[2]=3 +.[3]=4 +.[4]=5 +.[5]=6 +.[6]=7 +.[7]=8 +.[8]=9 +.[9]=10 +.[10]=11 +.[11]=12 +.[12]=13 +.[13]=14 +.[14]=15 +.[15]=16 +.[16]=17 +.[17]=18 +.[18]=19 +.[19]=20 +.[20]=21 +.[21]=22 +.[22]=23 +.[23]=24 +.[24]=25 +.[25]=26 +.[26]=27 +.[27]=28 +.[28]=29 +.[29]=30 +.[30]=31 +.[31]=32 +.[32]=33 +.[33]=34 +.[34]=35 +.[35]=36 +.[36]=37 +.[37]=38 +.[38]=39 +.[39]=40 +.[40]=41 +.[41]=42 +.[42]=43 +.[43]=44 +.[44]=45 +.[45]=46 +.[46]=47 +.[47]=48 +.[48]=49 +.[49]=50 +.[50]=51 +.[51]=52 +.[52]=53 +.[53]=54 +.[54]=55 +.[55]=56 +.[56]=57 +.[57]=58 +.[58]=59 +.[59]=60 +.[60]=61 +.[61]=62 +.[62]=63 +.[63]=64 +.[64]=65 +.[65]=66 +.[66]=67 +.[67]=68 +.[68]=69 +.[69]=70 +.[70]=71 +.[71]=72 +.[72]=73 +.[73]=74 +.[74]=75 +.[75]=76 +.[76]=77 +.[77]=78 +.[78]=79 +.[79]=80 +.[80]=81 +.[81]=82 +.[82]=83 +.[83]=84 +.[84]=85 +.[85]=86 +.[86]=87 +.[87]=88 +.[88]=89 +.[89]=90 +.[90]=91 +.[91]=92 +.[92]=93 +.[93]=94 +.[94]=95 +.[95]=96 +.[96]=97 +.[97]=98 +.[98]=99 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json new file mode 100644 index 0000000..7809d6c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_05.json @@ -0,0 +1 @@ +[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected new file mode 100644 index 0000000..5c9f48e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.expected @@ -0,0 +1,5 @@ +.=[] +.[0]="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +.[1]="bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" +.[2]="ccccccccccccccccccccccc" +.[3]="dddddddddddddddddddddddddddddddddddddddddddddddddddd" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json new file mode 100644 index 0000000..7f6c516 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_array_06.json @@ -0,0 +1,4 @@ +[ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + 
"bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "ccccccccccccccccccccccc", + "dddddddddddddddddddddddddddddddddddddddddddddddddddd" ] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected new file mode 100644 index 0000000..d761fce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.expected @@ -0,0 +1 @@ +.=123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json new file mode 100644 index 0000000..11f11f9 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_01.json @@ -0,0 +1 @@ +0123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected new file mode 100644 index 0000000..650e37c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.expected @@ -0,0 +1 @@ +.=-123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json new file mode 100644 index 0000000..bf11bce --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_02.json @@ -0,0 +1 @@ +-0123456789 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected new file mode 100644 index 0000000..1da2d39 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.expected @@ -0,0 +1,3 @@ +.=1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json new file mode 100644 index 0000000..a92b6bd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_03.json @@ -0,0 +1,3 @@ +1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected new file mode 100644 index 0000000..013f424 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.expected @@ -0,0 +1,2 @@ +.="abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json new file mode 100644 index 0000000..17eeb99 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_04.json @@ -0,0 +1,2 @@ +"abcdef" + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json new file mode 100644 index 0000000..d0aaea2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_05.json @@ -0,0 +1,2 @@ +null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected new file mode 100644 index 0000000..49be55a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.expected @@ -0,0 +1,2 @@ +.=true + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json new file mode 100644 index 0000000..7eead1e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_06.json @@ -0,0 +1,2 @@ +true + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected new file mode 100644 index 0000000..fe55a6a --- 
/dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.expected @@ -0,0 +1,2 @@ +.=false + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json new file mode 100644 index 0000000..a864bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_07.json @@ -0,0 +1,2 @@ +false + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json new file mode 100644 index 0000000..fd78837 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_08.json @@ -0,0 +1,3 @@ +// C++ style comment +null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected new file mode 100644 index 0000000..c8db822 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.expected @@ -0,0 +1,2 @@ +.=null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json new file mode 100644 index 0000000..fc95f0f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_basic_09.json @@ -0,0 +1,4 @@ +/* C style comment + */ +null + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected new file mode 100644 index 0000000..0b8f42d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.expected @@ -0,0 +1,8 @@ +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json new file mode 100644 index 0000000..0de8f9c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_comment_01.json @@ -0,0 +1,8 @@ +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected new file mode 100644 index 0000000..7573c88 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.expected @@ -0,0 +1,20 @@ +.={} +.attribute=[] +.attribute[0]="random" +.attribute[1]="short" +.attribute[2]="bold" +.attribute[3]=12 +.attribute[4]={} +.attribute[4].height=7 +.attribute[4].width=64 +.count=1234 +.name={} +.name.aka="T.E.S.T." 
+.name.id=123987 +.test={} +.test.1={} +.test.1.2={} +.test.1.2.3={} +.test.1.2.3.coord=[] +.test.1.2.3.coord[0]=1 +.test.1.2.3.coord[1]=2 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json new file mode 100644 index 0000000..cc0f30f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_complex_01.json @@ -0,0 +1,17 @@ +{ + "count" : 1234, + "name" : { "aka" : "T.E.S.T.", "id" : 123987 }, + "attribute" : [ + "random", + "short", + "bold", + 12, + { "height" : 7, "width" : 64 } + ], + "test": { "1" : + { "2" : + { "3" : { "coord" : [ 1,2] } + } + } + } +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected new file mode 100644 index 0000000..593f1db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.expected @@ -0,0 +1 @@ +.=2147483647 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json new file mode 100644 index 0000000..5ab12ff --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_01.json @@ -0,0 +1,2 @@ +// Max signed integer +2147483647 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected new file mode 100644 index 0000000..4b83bd7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.expected @@ -0,0 +1 @@ +.=-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json new file mode 100644 index 0000000..056c850 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_02.json @@ -0,0 +1,2 @@ +// Min signed integer +-2147483648 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected new file mode 100644 index 0000000..37c1cb1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.expected @@ -0,0 +1 @@ +.=4294967295 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json new file mode 100644 index 0000000..12ef3fb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_03.json @@ -0,0 +1,2 @@ +// Max unsigned integer +4294967295 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected new file mode 100644 index 0000000..b7b548e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.expected @@ -0,0 +1,2 @@ +.=0 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json new file mode 100644 index 0000000..bf81499 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_04.json @@ -0,0 +1,3 @@ +// Min unsigned integer +0 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected new file mode 100644 index 0000000..0caea9d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.expected @@ -0,0 +1,2 @@ +.=1 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json new file mode 100644 index 0000000..d474e1b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_05.json @@ -0,0 +1,2 @@ +1 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected 
b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected new file mode 100644 index 0000000..bc9520a1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.expected @@ -0,0 +1 @@ +.=9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json new file mode 100644 index 0000000..360d660 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_06_64bits.json @@ -0,0 +1,2 @@ +9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected new file mode 100644 index 0000000..39eb798 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.expected @@ -0,0 +1 @@ +.=-9223372036854775808 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json new file mode 100644 index 0000000..11d8513 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_07_64bits.json @@ -0,0 +1,2 @@ +-9223372036854775808 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected new file mode 100644 index 0000000..831f432 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.expected @@ -0,0 +1 @@ +.=18446744073709551615 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json new file mode 100644 index 0000000..6e1fb04 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_integer_08_64bits.json @@ -0,0 +1,2 @@ +18446744073709551615 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 
+.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 +.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 
+.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 +.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 
+.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 +.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 
+.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 +.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 
+.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 +.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 
+.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 +.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 
+.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 +.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 
+.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 +.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 
+.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 +.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 
+.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_large_01.json @@ -0,0 +1,2 @@ +[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766
,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,914,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,15
24,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected new file mode 100644 index 0000000..67444e5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.expected @@ -0,0 +1 @@ +.={} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_01.json @@ -0,0 +1 @@ +{} diff --git 
a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected new file mode 100644 index 0000000..79391c2 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.expected @@ -0,0 +1,2 @@ +.={} +.count=1234 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json new file mode 100644 index 0000000..d0f2fac --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_02.json @@ -0,0 +1 @@ +{ "count" : 1234 } diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected new file mode 100644 index 0000000..5e96113 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.expected @@ -0,0 +1,4 @@ +.={} +.attribute="random" +.count=1234 +.name="test" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json new file mode 100644 index 0000000..4fcd4d8 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_03.json @@ -0,0 +1,5 @@ +{ + "count" : 1234, + "name" : "test", + "attribute" : "random" +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected new file mode 100644 index 0000000..812965b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.expected @@ -0,0 +1,2 @@ +.={} +.=1234 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json new file mode 100644 index 0000000..450762d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_object_04.json @@ -0,0 +1,3 @@ +{ + "" : 1234 +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected new file mode 100644 index 0000000..8d88041 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.expected @@ -0,0 +1,3 @@ +.={} +.first=1 +.second=2 diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json new file mode 100644 index 0000000..fabd55d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_preserve_comment_01.json @@ -0,0 +1,14 @@ +/* A comment + at the beginning of the file. + */ +{ + "first" : 1, // comment after 'first' on the same line + +/* Comment before 'second' + */ + "second" : 2 +} + +/* A comment at + the end of the file. 
+ */ diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected new file mode 100644 index 0000000..ae23572 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.expected @@ -0,0 +1,2 @@ +.=8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json new file mode 100644 index 0000000..358452d --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_01.json @@ -0,0 +1,3 @@ +// 2^33 => out of integer range, switch to double +8589934592 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_02.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected new file mode 100644 index 0000000..df8de42 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.expected @@ -0,0 +1,2 @@ +.=-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json new file mode 100644 index 0000000..936c706 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_03.json @@ -0,0 +1,3 @@ +// -2^32 => out of signed integer range, switch to double +-4294967295 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected new file mode 100644 index 0000000..d726abe --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.expected @@ -0,0 +1,2 @@ +.=1.2345678 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json new file mode 100644 index 0000000..a8eb6d0 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_04.json @@ -0,0 +1,3 @@ +// 1.2345678 +12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected new file mode 100644 index 0000000..949fd8f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.expected @@ -0,0 +1,3 @@ +.=1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json new file mode 100644 index 0000000..f7923ba --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_05.json @@ -0,0 +1,3 @@ +// 1234567.8 +0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected new file mode 100644 index 0000000..03b7d7f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.expected @@ -0,0 +1,3 @@ +.=-1.2345678 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json new file mode 100644 index 0000000..485419a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_06.json @@ -0,0 +1,3 @@ +// -1.2345678 +-12345678e-7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected 
b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected new file mode 100644 index 0000000..12025a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.expected @@ -0,0 +1,3 @@ +.=-1234567.8 + + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json new file mode 100644 index 0000000..8013eb5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_real_07.json @@ -0,0 +1,3 @@ +// -1234567.8 +-0.12345678e7 + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected new file mode 100644 index 0000000..8fd37b1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json new file mode 100644 index 0000000..c8c059b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_01.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected new file mode 100644 index 0000000..0443bc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.expected @@ -0,0 +1 @@ +.="!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json new file mode 100644 
index 0000000..f0fe56a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_02.json @@ -0,0 +1 @@ +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected new file mode 100644 index 0000000..6ed627a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.expected @@ -0,0 +1 @@ +.="http://jsoncpp.sourceforge.net/" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json new file mode 100644 index 0000000..f0a220f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_03.json @@ -0,0 +1 @@ +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected new file mode 100644 index 0000000..447f85a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.expected @@ -0,0 +1 @@ +.="a" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json new file mode 100644 index 0000000..024114b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_01.json @@ -0,0 +1 @@ +"\u0061" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected new file mode 100644 index 0000000..c0b3b43 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.expected @@ -0,0 +1 @@ 
+.="¢" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json new file mode 100644 index 0000000..4961024 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_02.json @@ -0,0 +1 @@ +"\u00A2" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected new file mode 100644 index 0000000..7289743 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.expected @@ -0,0 +1 @@ +.="€" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json new file mode 100644 index 0000000..e7e1a9e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_03.json @@ -0,0 +1 @@ +"\u20AC" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected new file mode 100644 index 0000000..868fbc3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.expected @@ -0,0 +1 @@ +.="𝄞" diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json new file mode 100644 index 0000000..dae65c5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_04.json @@ -0,0 +1 @@ +"\uD834\uDD1E" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected new file mode 100644 index 0000000..19b2c40 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.expected @@ -0,0 +1,2 @@ +.="Zażółć gęślą jaźń" + diff --git a/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json new file mode 100644 index 0000000..8770410 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/data/test_string_unicode_05.json @@ -0,0 +1 @@ +"Zażółć gęślą jaźń" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py b/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py new file mode 100644 index 0000000..5b215c4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/generate_expected.py @@ -0,0 +1,11 @@ +import glob +import os.path +for path in glob.glob( '*.json' ): + text = file(path,'rt').read() + target = os.path.splitext(path)[0] + '.expected' + if os.path.exists( target ): + print 'skipping:', target + else: + print 'creating:', target + file(target,'wt').write(text) + diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json new file mode 100644 index 0000000..6216b86 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json new file mode 100644 index 0000000..edac927 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json 
b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff 
--git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json new file mode 100644 index 0000000..45cba73 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json new file mode 100644 index 0000000..70e2685 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": 
false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json new file mode 100644 index 0000000..d3c63c7 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt new file mode 100644 index 0000000..0efc2a4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py b/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py new file mode 100644 index 0000000..504f3db --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/pyjsontestrunner.py @@ -0,0 +1,64 @@ +# Simple implementation of a json test runner to run the test against json-py. + +import sys +import os.path +import json +import types + +if len(sys.argv) != 2: + print "Usage: %s input-json-file", sys.argv[0] + sys.exit(3) + +input_path = sys.argv[1] +base_path = os.path.splitext(input_path)[0] +actual_path = base_path + '.actual' +rewrite_path = base_path + '.rewrite' +rewrite_actual_path = base_path + '.actual-rewrite' + +def valueTreeToString( fout, value, path = '.' ): + ty = type(value) + if ty is types.DictType: + fout.write( '%s={}\n' % path ) + suffix = path[-1] != '.' and '.' 
or '' + names = value.keys() + names.sort() + for name in names: + valueTreeToString( fout, value[name], path + suffix + name ) + elif ty is types.ListType: + fout.write( '%s=[]\n' % path ) + for index, childValue in zip( xrange(0,len(value)), value ): + valueTreeToString( fout, childValue, path + '[%d]' % index ) + elif ty is types.StringType: + fout.write( '%s="%s"\n' % (path,value) ) + elif ty is types.IntType: + fout.write( '%s=%d\n' % (path,value) ) + elif ty is types.FloatType: + fout.write( '%s=%.16g\n' % (path,value) ) + elif value is True: + fout.write( '%s=true\n' % path ) + elif value is False: + fout.write( '%s=false\n' % path ) + elif value is None: + fout.write( '%s=null\n' % path ) + else: + assert False and "Unexpected value type" + +def parseAndSaveValueTree( input, actual_path ): + root = json.loads( input ) + fout = file( actual_path, 'wt' ) + valueTreeToString( fout, root ) + fout.close() + return root + +def rewriteValueTree( value, rewrite_path ): + rewrite = json.dumps( value ) + #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? + file( rewrite_path, 'wt').write( rewrite + '\n' ) + return rewrite + +input = file( input_path, 'rt' ).read() +root = parseAndSaveValueTree( input, actual_path ) +rewrite = rewriteValueTree( json.write( root ), rewrite_path ) +rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) + +sys.exit( 0 ) diff --git a/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py b/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py new file mode 100644 index 0000000..ffe8bd5 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/runjsontests.py @@ -0,0 +1,134 @@ +import sys +import os +import os.path +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' + +def compareOutputs( expected, actual, message ): + expected = expected.strip().replace('\r','').split('\n') + actual = actual.strip().replace('\r','').split('\n') + diff_line = 0 + max_line_to_compare = min( len(expected), len(actual) ) + for index in xrange(0,max_line_to_compare): + if expected[index].strip() != actual[index].strip(): + diff_line = index + 1 + break + if diff_line == 0 and len(expected) != len(actual): + diff_line = max_line_to_compare+1 + if diff_line == 0: + return None + def safeGetLine( lines, index ): + index += -1 + if index >= len(lines): + return '' + return lines[index].strip() + return """ Difference in %s at line %d: + Expected: '%s' + Actual: '%s' +""" % (message, diff_line, + safeGetLine(expected,diff_line), + safeGetLine(actual,diff_line) ) + +def safeReadFile( path ): + try: + return file( path, 'rt' ).read() + except IOError, e: + return '' % (path,e) + +def runAllTests( jsontest_executable_path, input_dir = None, + use_valgrind=False, with_json_checker=False ): + if not input_dir: + input_dir = os.path.join( os.getcwd(), 'data' ) + tests = glob( os.path.join( input_dir, '*.json' ) ) + if with_json_checker: + test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) + else: + test_jsonchecker = [] + failed_tests = [] + valgrind_path = use_valgrind and VALGRIND_CMD or '' + for input_path in tests + test_jsonchecker: + expect_failure = os.path.basename( input_path ).startswith( 'fail' ) + is_json_checker_test = (input_path in test_jsonchecker) or expect_failure + print 'TESTING:', input_path, + options = is_json_checker_test and '--json-checker' or '' + pipe = os.popen( "%s%s %s %s" % ( + valgrind_path, jsontest_executable_path, options, + input_path) ) + 
process_output = pipe.read() + status = pipe.close() + if is_json_checker_test: + if expect_failure: + if status is None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % + safeReadFile(input_path)) ) + else: + print 'OK' + else: + if status is not None: + print 'FAILED' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + print 'OK' + else: + base_path = os.path.splitext(input_path)[0] + actual_output = safeReadFile( base_path + '.actual' ) + actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) + file(base_path + '.process-output','wt').write( process_output ) + if status: + print 'parsing failed' + failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) + else: + expected_output_path = os.path.splitext(input_path)[0] + '.expected' + expected_output = file( expected_output_path, 'rt' ).read() + detail = ( compareOutputs( expected_output, actual_output, 'input' ) + or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) + if detail: + print 'FAILED' + failed_tests.append( (input_path, detail) ) + else: + print 'OK' + + if failed_tests: + print + print 'Failure details:' + for failed_test in failed_tests: + print '* Test', failed_test[0] + print failed_test[1] + print + print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), + len(failed_tests) ) + return 1 + else: + print 'All %d tests passed.' % len(tests) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] [test case directory]" ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.add_option("-c", "--with-json-checker", + action="store_true", dest="with_json_checker", default=False, + help="run all the tests from the official JSONChecker test suite of json.org") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) < 1 or len(args) > 2: + parser.error( 'Must provides at least path to jsontestrunner executable.' 
) + sys.exit( 1 ) + + jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) + if len(args) > 1: + input_path = os.path.normpath( os.path.abspath( args[1] ) ) + else: + input_path = None + status = runAllTests( jsontest_executable_path, input_path, + use_valgrind=options.valgrind, with_json_checker=options.with_json_checker ) + sys.exit( status ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/test/rununittests.py b/tags/jsoncpp/0.6.0-rc2/test/rununittests.py new file mode 100644 index 0000000..ccc54e4 --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/test/rununittests.py @@ -0,0 +1,73 @@ +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' 
) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() diff --git a/tags/jsoncpp/0.6.0-rc2/version b/tags/jsoncpp/0.6.0-rc2/version new file mode 100644 index 0000000..673adfb --- /dev/null +++ b/tags/jsoncpp/0.6.0-rc2/version @@ -0,0 +1 @@ +0.6.0-rc2 \ No newline at end of file From 3845731d8b423c86084ae9d924fed9d8fed592c2 Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 2 May 2011 22:07:18 +0000 Subject: [PATCH 192/268] Released 0.6.0-rc2 git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@192 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/version b/trunk/jsoncpp/version index 673adfb..7defe1e 100644 --- a/trunk/jsoncpp/version +++ b/trunk/jsoncpp/version @@ -1 +1 @@ -0.6.0-rc2 \ No newline at end of file +0.6.0-dev \ No newline at end of file From 80d69e62a4a15ab6c533c5ecfb3c5388b0f27b61 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 00:41:12 +0000 Subject: [PATCH 193/268] Fixed a hard to debug crash on OS X related to sscanf format strings. See here for more info: http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@193 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 8bb0304..f3a5c41 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -610,17 +610,25 @@ Reader::decodeDouble( Token &token ) const int bufferSize = 32; int count; int length = int(token.end_ - token.start_); + + // Avoid using a string constant for the format control string given to + // sscanf, as this can cause hard to debug crashes on OS X. See here for more + // info: + // + // http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html + char format[] = "%lf"; + if ( length <= bufferSize ) { Char buffer[bufferSize+1]; memcpy( buffer, token.start_, length ); buffer[length] = 0; - count = sscanf( buffer, "%lf", &value ); + count = sscanf( buffer, format, &value ); } else { std::string buffer( token.start_, token.end_ ); - count = sscanf( buffer.c_str(), "%lf", &value ); + count = sscanf( buffer.c_str(), format, &value ); } if ( count != 1 ) From cae180e56788acf54b4e1a641a1fa7e40201bbc7 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 00:42:15 +0000 Subject: [PATCH 194/268] Removed an unused typedef. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@194 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_batchallocator.h | 3 --- 1 file changed, 3 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_batchallocator.h b/trunk/jsoncpp/src/lib_json/json_batchallocator.h index 173e2ed..2a7c024 100644 --- a/trunk/jsoncpp/src/lib_json/json_batchallocator.h +++ b/trunk/jsoncpp/src/lib_json/json_batchallocator.h @@ -30,8 +30,6 @@ template Date: Tue, 24 May 2011 00:42:58 +0000 Subject: [PATCH 195/268] Fixed constructor initializer list order warnings/errors. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@195 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- .../src/lib_json/json_internalarray.inl | 2 +- trunk/jsoncpp/src/lib_json/json_value.cpp | 26 +++++++++---------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_internalarray.inl b/trunk/jsoncpp/src/lib_json/json_internalarray.inl index 3a532ad..c6927c3 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalarray.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalarray.inl @@ -258,8 +258,8 @@ ValueInternalArray::ValueInternalArray() ValueInternalArray::ValueInternalArray( const ValueInternalArray &other ) : pages_( 0 ) - , pageCount_( 0 ) , size_( other.size_ ) + , pageCount_( 0 ) { PageIndex minNewPages = other.size_ / itemsPerPage; arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages ); diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index ff98f63..83170f8 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -236,10 +236,10 @@ Value::CZString::isStaticString() const Value::Value( ValueType type ) : type_( type ) , allocated_( 0 ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { switch ( type ) { @@ -280,20 +280,20 @@ Value::Value( ValueType type ) #if defined(JSON_HAS_INT64) Value::Value( UInt value ) : type_( uintValue ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.uint_ = value; } Value::Value( Int value ) : type_( intValue ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.int_ = value; } @@ -303,10 +303,10 @@ Value::Value( Int value ) Value::Value( Int64 value ) : type_( intValue ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.int_ = value; } @@ -314,20 +314,20 @@ Value::Value( Int64 value ) Value::Value( UInt64 value ) : type_( uintValue ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.uint_ = value; } Value::Value( double value ) : type_( realValue ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.real_ = value; } @@ -335,10 +335,10 @@ Value::Value( double value ) Value::Value( const char *value ) : type_( stringValue ) , allocated_( true ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.string_ = duplicateStringValue( value ); } @@ -348,10 +348,10 @@ Value::Value( const char *beginValue, const char *endValue ) : type_( stringValue ) , allocated_( true ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.string_ = duplicateStringValue( beginValue, (unsigned int)(endValue - beginValue) ); @@ -361,10 +361,10 @@ Value::Value( const char *beginValue, Value::Value( const std::string &value ) : type_( stringValue ) , allocated_( true ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.string_ = duplicateStringValue( value.c_str(), (unsigned int)value.length() ); @@ -374,10 +374,10 @@ Value::Value( const std::string &value ) Value::Value( const StaticString &value ) : type_( stringValue ) , allocated_( false ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , 
comments_( 0 ) { value_.string_ = const_cast( value.c_str() ); } @@ -387,10 +387,10 @@ Value::Value( const StaticString &value ) Value::Value( const CppTL::ConstString &value ) : type_( stringValue ) , allocated_( true ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.string_ = duplicateStringValue( value, value.length() ); } @@ -398,10 +398,10 @@ Value::Value( const CppTL::ConstString &value ) Value::Value( bool value ) : type_( booleanValue ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { value_.bool_ = value; } @@ -409,10 +409,10 @@ Value::Value( bool value ) Value::Value( const Value &other ) : type_( other.type_ ) - , comments_( 0 ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif + , comments_( 0 ) { switch ( type_ ) { From bc501357ec1a54af6cc63155d907e81ca432b0e7 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 00:43:30 +0000 Subject: [PATCH 196/268] Updated a cast to use a more appropriate type. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@196 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 83170f8..c48455b 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -926,7 +926,7 @@ Value::isConvertibleTo( ValueType other ) const || other == booleanValue; case uintValue: return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (unsigned)maxInt ) + || ( other == intValue && value_.uint_ <= (LargestUInt)maxInt ) || other == uintValue || other == realValue || other == stringValue From 5b8632018e99bb7dc482fb21fc426e50c31d41d0 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 00:43:59 +0000 Subject: [PATCH 197/268] Made two security fixes. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@197 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 5 +++++ trunk/jsoncpp/src/lib_json/json_value.cpp | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index f3a5c41..76e12f9 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -611,6 +611,11 @@ Reader::decodeDouble( Token &token ) int count; int length = int(token.end_ - token.start_); + // Sanity check to avoid buffer overflow exploits. + if (length < 0) { + return addError( "Unable to parse token length", token ); + } + // Avoid using a string constant for the format control string given to // sscanf, as this can cause hard to debug crashes on OS X. See here for more // info: diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index c48455b..dd60a50 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -56,6 +56,12 @@ duplicateStringValue( const char *value, { if ( length == unknown ) length = (unsigned int)strlen(value); + + // Avoid an integer overflow in the call to malloc below by limiting length + // to a sane value. 
+ if (length >= (unsigned)Value::maxInt) + length = Value::maxInt - 1; + char *newString = static_cast( malloc( length + 1 ) ); JSON_ASSERT_MESSAGE( newString != 0, "Failed to allocate string value buffer" ); memcpy( newString, value, length ); From e876cfa370119822cffa28ddd20caef86e1ba036 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 01:03:22 +0000 Subject: [PATCH 198/268] Centralized assertion macros and made them obey JSON_USE_EXCEPTION. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@198 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/assertions.h | 23 +++++++++++++++++++ .../src/lib_json/json_internalarray.inl | 6 ++--- trunk/jsoncpp/src/lib_json/json_reader.cpp | 4 ++-- trunk/jsoncpp/src/lib_json/json_value.cpp | 4 +--- 4 files changed, 28 insertions(+), 9 deletions(-) create mode 100644 trunk/jsoncpp/include/json/assertions.h diff --git a/trunk/jsoncpp/include/json/assertions.h b/trunk/jsoncpp/include/json/assertions.h new file mode 100644 index 0000000..005ff71 --- /dev/null +++ b/trunk/jsoncpp/include/json/assertions.h @@ -0,0 +1,23 @@ +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED +# define CPPTL_JSON_ASSERTIONS_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +# include +#endif // if !defined(JSON_IS_AMALGAMATION) + +#if defined(JSON_USE_EXCEPTION) +#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw +#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); +#else // defined(JSON_USE_EXCEPTION) +#define JSON_ASSERT( condition ) assert( condition ); +#define JSON_FAIL_MESSAGE( message ) { std::cerr << message; exit(123); } +#endif + +#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) { JSON_FAIL_MESSAGE( message ) } + +#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED diff --git a/trunk/jsoncpp/src/lib_json/json_internalarray.inl b/trunk/jsoncpp/src/lib_json/json_internalarray.inl index c6927c3..5e8b8ef 100644 --- a/trunk/jsoncpp/src/lib_json/json_internalarray.inl +++ b/trunk/jsoncpp/src/lib_json/json_internalarray.inl @@ -53,8 +53,7 @@ public: // overridden from ValueArrayAllocator if ( minNewIndexCount > newIndexCount ) newIndexCount = minNewIndexCount; void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); + JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc."); indexCount = newIndexCount; indexes = static_cast( newIndexes ); } @@ -117,8 +116,7 @@ public: // overridden from ValueArrayAllocator if ( minNewIndexCount > newIndexCount ) newIndexCount = minNewIndexCount; void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount ); - if ( !newIndexes ) - throw std::bad_alloc(); + JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc."); indexCount = newIndexCount; indexes = static_cast( newIndexes ); } diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 76e12f9..603fd76 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -4,6 +4,7 @@ // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE #if !defined(JSON_IS_AMALGAMATION) +# include # include # include # include "json_tool.h" @@ -884,8 +885,7 @@ std::istream& operator>>( std::istream &sin, 
Value &root ) { Json::Reader reader; bool ok = reader.parse(sin, root, true); - //JSON_ASSERT( ok ); - if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages()); + if (!ok) JSON_FAIL_MESSAGE(reader.getFormattedErrorMessages()); return sin; } diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index dd60a50..f314ba3 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -4,6 +4,7 @@ // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE #if !defined(JSON_IS_AMALGAMATION) +# include # include # include # ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR @@ -21,9 +22,6 @@ #include // size_t #define JSON_ASSERT_UNREACHABLE assert( false ) -#define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw -#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); -#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) JSON_FAIL_MESSAGE( message ) namespace Json { From afdae96103c9ff12a87525a4e81c3e5bd5df4477 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 03:18:02 +0000 Subject: [PATCH 199/268] Fixed a bunch of compilation errors when JSON_HAS_INT64 is set. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@199 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/value.h | 4 ++++ trunk/jsoncpp/src/lib_json/json_value.cpp | 7 ++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 32e3455..e3869e5 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -151,12 +151,14 @@ namespace Json { /// Maximum unsigned int value that can be stored in a Json::Value. static const UInt maxUInt; +# if defined(JSON_HAS_INT64) /// Minimum signed 64 bits int value that can be stored in a Json::Value. static const Int64 minInt64; /// Maximum signed 64 bits int value that can be stored in a Json::Value. static const Int64 maxInt64; /// Maximum unsigned 64 bits int value that can be stored in a Json::Value. 
static const UInt64 maxUInt64; +#endif // defined(JSON_HAS_INT64) private: #ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION @@ -265,8 +267,10 @@ namespace Json { # endif Int asInt() const; UInt asUInt() const; +#if defined(JSON_HAS_INT64) Int64 asInt64() const; UInt64 asUInt64() const; +#endif // if defined(JSON_HAS_INT64) LargestInt asLargestInt() const; LargestUInt asLargestUInt() const; float asFloat() const; diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index f314ba3..d4a2a65 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -29,9 +29,11 @@ const Value Value::null; const Int Value::minInt = Int( ~(UInt(-1)/2) ); const Int Value::maxInt = Int( UInt(-1)/2 ); const UInt Value::maxUInt = UInt(-1); +# if defined(JSON_HAS_INT64) const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); const UInt64 Value::maxUInt64 = UInt64(-1); +#endif // defined(JSON_HAS_INT64) const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); const LargestUInt Value::maxLargestUInt = LargestUInt(-1); @@ -281,7 +283,6 @@ Value::Value( ValueType type ) } -#if defined(JSON_HAS_INT64) Value::Value( UInt value ) : type_( uintValue ) # ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -302,9 +303,8 @@ Value::Value( Int value ) value_.int_ = value; } -#endif // if defined(JSON_HAS_INT64) - +# if defined(JSON_HAS_INT64) Value::Value( Int64 value ) : type_( intValue ) # ifdef JSON_VALUE_USE_INTERNAL_MAP @@ -325,6 +325,7 @@ Value::Value( UInt64 value ) { value_.uint_ = value; } +#endif // defined(JSON_HAS_INT64) Value::Value( double value ) : type_( realValue ) From c3f78550820e100b3431a355789e2b09859a9113 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 03:19:50 +0000 Subject: [PATCH 200/268] Added some test cases that catch a parsing bug. 
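The constants introduced in PATCH 199 above derive the 64-bit limits from unsigned arithmetic rather than from <limits>: UInt64(-1) is the all-ones bit pattern, halving it yields 2^63 - 1, and the bitwise complement of that half gives the most negative signed value. A minimal standalone sketch of the same identities, with <cstdint> types standing in for the Json typedefs (the conversion of 2^63 back to a signed type relies on the usual two's-complement behaviour, just as the library code does):

    // Sketch only: verifies the limit identities used by PATCH 199.
    #include <cassert>
    #include <cstdint>
    #include <limits>

    int main()
    {
        const uint64_t allOnes   = uint64_t(-1);            // 0xFFFFFFFFFFFFFFFF
        const int64_t  maxInt64  = int64_t(allOnes / 2);    // 2^63 - 1
        const int64_t  minInt64  = int64_t(~(allOnes / 2)); // -2^63 (implementation-defined conversion)
        const uint64_t maxUInt64 = allOnes;                 // 2^64 - 1

        assert(maxInt64  == std::numeric_limits<int64_t>::max());
        assert(minInt64  == std::numeric_limits<int64_t>::min());
        assert(maxUInt64 == std::numeric_limits<uint64_t>::max());
        return 0;
    }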
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@200 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/data/test_real_08.expected | 1 + trunk/jsoncpp/test/data/test_real_08.json | 4 ++++ trunk/jsoncpp/test/data/test_real_09.expected | 1 + trunk/jsoncpp/test/data/test_real_09.json | 4 ++++ trunk/jsoncpp/test/data/test_real_10.expected | 1 + trunk/jsoncpp/test/data/test_real_10.json | 4 ++++ trunk/jsoncpp/test/data/test_real_11.expected | 1 + trunk/jsoncpp/test/data/test_real_11.json | 4 ++++ 8 files changed, 20 insertions(+) create mode 100644 trunk/jsoncpp/test/data/test_real_08.expected create mode 100644 trunk/jsoncpp/test/data/test_real_08.json create mode 100644 trunk/jsoncpp/test/data/test_real_09.expected create mode 100644 trunk/jsoncpp/test/data/test_real_09.json create mode 100644 trunk/jsoncpp/test/data/test_real_10.expected create mode 100644 trunk/jsoncpp/test/data/test_real_10.json create mode 100644 trunk/jsoncpp/test/data/test_real_11.expected create mode 100644 trunk/jsoncpp/test/data/test_real_11.json diff --git a/trunk/jsoncpp/test/data/test_real_08.expected b/trunk/jsoncpp/test/data/test_real_08.expected new file mode 100644 index 0000000..9a5f062 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_08.expected @@ -0,0 +1 @@ +.=4300000001 diff --git a/trunk/jsoncpp/test/data/test_real_08.json b/trunk/jsoncpp/test/data/test_real_08.json new file mode 100644 index 0000000..cca950d --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_08.json @@ -0,0 +1,4 @@ +// Out of 32-bit integer range, switch to double in 32-bit mode. Length the +// same as UINT_MAX in base 10 and digit less than UINT_MAX's last digit in +// order to catch a bug in the parsing code. +4300000001 diff --git a/trunk/jsoncpp/test/data/test_real_09.expected b/trunk/jsoncpp/test/data/test_real_09.expected new file mode 100644 index 0000000..ee2e5ef --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_09.expected @@ -0,0 +1 @@ +.=19000000000000000001 diff --git a/trunk/jsoncpp/test/data/test_real_09.json b/trunk/jsoncpp/test/data/test_real_09.json new file mode 100644 index 0000000..e65d50c --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_09.json @@ -0,0 +1,4 @@ +// Out of 64-bit integer range, switch to double in all modes. Length the same +// as ULONG_MAX in base 10 and digit less than ULONG_MAX's last digit in order +// to catch a bug in the parsing code. +19000000000000000001 diff --git a/trunk/jsoncpp/test/data/test_real_10.expected b/trunk/jsoncpp/test/data/test_real_10.expected new file mode 100644 index 0000000..01126bf --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_10.expected @@ -0,0 +1 @@ +.=-2200000001 diff --git a/trunk/jsoncpp/test/data/test_real_10.json b/trunk/jsoncpp/test/data/test_real_10.json new file mode 100644 index 0000000..a6a8bce --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_10.json @@ -0,0 +1,4 @@ +// Out of 32-bit signed integer range, switch to double in all modes. Length +// the same as INT_MIN in base 10 and digit less than INT_MIN's last digit in +// order to catch a bug in the parsing code. 
+-2200000001 diff --git a/trunk/jsoncpp/test/data/test_real_11.expected b/trunk/jsoncpp/test/data/test_real_11.expected new file mode 100644 index 0000000..83d3cc3 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_11.expected @@ -0,0 +1 @@ +.=-9300000000000000001 diff --git a/trunk/jsoncpp/test/data/test_real_11.json b/trunk/jsoncpp/test/data/test_real_11.json new file mode 100644 index 0000000..63cdb36 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_11.json @@ -0,0 +1,4 @@ +// Out of 64-bit signed integer range, switch to double in all modes. Length +// the same as LONG_MIN in base 10 and digit less than LONG_MIN's last digit in +// order to catch a bug in the parsing code. +-9300000000000000001 From 4eedf395acd6d9e76a2dcd23ec990e0e82d514ee Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 03:59:24 +0000 Subject: [PATCH 201/268] Fixed a parsing bug in decodeNumber, updating the failing test cases to be correct in the process. (The test cases incorrectly used exact integers instead of scientific notation.) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@201 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 13 +++++++------ trunk/jsoncpp/test/data/test_real_09.expected | 2 +- trunk/jsoncpp/test/data/test_real_11.expected | 2 +- trunk/jsoncpp/test/data/test_real_12.expected | 1 + trunk/jsoncpp/test/data/test_real_12.json | 2 ++ 5 files changed, 12 insertions(+), 8 deletions(-) create mode 100644 trunk/jsoncpp/test/data/test_real_12.expected create mode 100644 trunk/jsoncpp/test/data/test_real_12.json diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 603fd76..238a3c5 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -573,8 +573,6 @@ Reader::decodeNumber( Token &token ) Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt) : Value::maxLargestUInt; Value::LargestUInt threshold = maxIntegerValue / 10; - Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 ); - assert( lastDigitThreshold >=0 && lastDigitThreshold <= 9 ); Value::LargestUInt value = 0; while ( current < token.end_ ) { @@ -584,10 +582,13 @@ Reader::decodeNumber( Token &token ) Value::UInt digit(c - '0'); if ( value >= threshold ) { - // If the current digit is not the last one, or if it is - // greater than the last digit of the maximum integer value, - // the parse the number as a double. - if ( current != token.end_ || digit > lastDigitThreshold ) + // We've hit or exceeded the max value divided by 10 (rounded down). If + // a) we've only just touched the limit, b) this is the last digit, and + // c) it's small enough to fit in that rounding delta, we're okay. + // Otherwise treat this number as a double to avoid overflow. 
+ if (value > threshold || + current != token.end_ || + digit > maxIntegerValue % 10) { return decodeDouble( token ); } diff --git a/trunk/jsoncpp/test/data/test_real_09.expected b/trunk/jsoncpp/test/data/test_real_09.expected index ee2e5ef..6da815e 100644 --- a/trunk/jsoncpp/test/data/test_real_09.expected +++ b/trunk/jsoncpp/test/data/test_real_09.expected @@ -1 +1 @@ -.=19000000000000000001 +.=1.9e+19 diff --git a/trunk/jsoncpp/test/data/test_real_11.expected b/trunk/jsoncpp/test/data/test_real_11.expected index 83d3cc3..17f4187 100644 --- a/trunk/jsoncpp/test/data/test_real_11.expected +++ b/trunk/jsoncpp/test/data/test_real_11.expected @@ -1 +1 @@ -.=-9300000000000000001 +.=-9.3e+18 diff --git a/trunk/jsoncpp/test/data/test_real_12.expected b/trunk/jsoncpp/test/data/test_real_12.expected new file mode 100644 index 0000000..a000319 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_12.expected @@ -0,0 +1 @@ +.=1.844674407370955e+19 diff --git a/trunk/jsoncpp/test/data/test_real_12.json b/trunk/jsoncpp/test/data/test_real_12.json new file mode 100644 index 0000000..0a13eed --- /dev/null +++ b/trunk/jsoncpp/test/data/test_real_12.json @@ -0,0 +1,2 @@ +// 2^64 -> switch to double. +18446744073709551616 From 6fcf4b1495fce473a5c2f1800a84f58e8b1d2d5e Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 06:27:36 +0000 Subject: [PATCH 202/268] Fixed a missing include error. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@202 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/assertions.h | 3 +++ 1 file changed, 3 insertions(+) diff --git a/trunk/jsoncpp/include/json/assertions.h b/trunk/jsoncpp/include/json/assertions.h index 005ff71..eb87010 100644 --- a/trunk/jsoncpp/include/json/assertions.h +++ b/trunk/jsoncpp/include/json/assertions.h @@ -6,6 +6,9 @@ #ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED # define CPPTL_JSON_ASSERTIONS_H_INCLUDED +#include +#include + #if !defined(JSON_IS_AMALGAMATION) # include #endif // if !defined(JSON_IS_AMALGAMATION) From f16b22e2561bb84e1646768ca4dedd9189d1be47 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 23:05:56 +0000 Subject: [PATCH 203/268] Added a .gitignore file, for ease of use with git-svn. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@203 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/.gitignore | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 trunk/jsoncpp/.gitignore diff --git a/trunk/jsoncpp/.gitignore b/trunk/jsoncpp/.gitignore new file mode 100644 index 0000000..ce4932a --- /dev/null +++ b/trunk/jsoncpp/.gitignore @@ -0,0 +1,10 @@ +*.pyc +*.swp + +*.actual +*.actual-rewrite +*.process-output +*.rewrite +bin/ +buildscons/ +libs/ From 4d224d8a3c4ad137a05d916a6729d84195001e28 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Tue, 24 May 2011 23:08:59 +0000 Subject: [PATCH 204/268] Fixed a "comparison between signed and unsigned" warning/error. 
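The rewritten guard in PATCH 201 above avoids integer overflow while accumulating decimal digits: once the running value reaches maxIntegerValue / 10, one more digit is accepted only if the value sits exactly on that threshold, the digit is the final one, and it does not exceed maxIntegerValue % 10; otherwise the token is re-parsed as a double. A rough standalone sketch of the same technique (accumulateDigits is an illustrative name, not part of the library, and the input is assumed to contain decimal digits only):

    #include <cstdint>
    #include <string>

    // Returns false when the digits would exceed maxValue, signalling the
    // caller to fall back to floating point, as decodeNumber does in PATCH 201.
    bool accumulateDigits(const std::string &digits, uint64_t maxValue, uint64_t &out)
    {
        const uint64_t threshold = maxValue / 10;
        uint64_t value = 0;
        for (std::string::size_type i = 0; i < digits.size(); ++i)
        {
            const uint64_t digit = uint64_t(digits[i] - '0');
            if (value >= threshold)
            {
                // Appending is only safe when we are exactly at the threshold,
                // this is the last digit, and it fits in maxValue % 10.
                if (value > threshold || i + 1 != digits.size() || digit > maxValue % 10)
                    return false;
            }
            value = value * 10 + digit;
        }
        out = value;
        return true;
    }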
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@204 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index d4a2a65..7c450ba 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -729,7 +729,7 @@ Value::asUInt() const return 0; case intValue: JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - JSON_ASSERT_MESSAGE( value_.int_ <= maxUInt, "signed integer out of UInt range" ); + JSON_ASSERT_MESSAGE( UInt(value_.int_) <= maxUInt, "signed integer out of UInt range" ); return UInt(value_.int_); case uintValue: JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); From 727c9e49aea615ef0390a01cc2fd1f6d4cbf4ba6 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 00:39:17 +0000 Subject: [PATCH 205/268] Fixed a whitespace problem. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@205 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 3275219..46c5452 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -45,7 +45,7 @@ struct ValueTest : JsonTest::TestCase , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) , unsignedInteger_( 34567890u ) , real_( 1234.56789 ) - , float_( 0.00390625f ) + , float_( 0.00390625f ) , emptyString_( "" ) , string1_( "a" ) , string_( "sometext with space" ) From 9e65e9478144c7d7a36f81b446f87e96c79a19a1 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 00:39:55 +0000 Subject: [PATCH 206/268] Renamed test cases to make more sense with the upcoming new behavior of isFoo methods. 
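PATCH 204 above silences a sign-compare warning by casting the signed operand, which is safe only because the assertion on the previous line has already ruled out negative values. The underlying pitfall: when an int meets an unsigned int of the same width, the int is converted to unsigned first, so a negative value compares as a very large positive one. A hypothetical helper (fitsInUInt is not library code) showing the check-then-cast pattern:

    #include <cassert>

    // Illustrative only: the check-then-cast pattern used in PATCH 204.
    bool fitsInUInt(int value, unsigned int maxValue)
    {
        // Writing "value <= maxValue" directly would convert a negative value
        // to a huge unsigned number before comparing, and the compiler warns.
        assert(value >= 0);                       // establish the precondition first
        return (unsigned int)(value) <= maxValue; // then the cast is well defined
    }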
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@206 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 33 ++++++++++++------------ 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 46c5452..fb00ccc 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -100,7 +100,7 @@ JSONTEST_FIXTURE( ValueTest, size ) } -JSONTEST_FIXTURE( ValueTest, isObject ) +JSONTEST_FIXTURE( ValueTest, objectTypes ) { IsCheck checks; checks.isObject_ = true; @@ -109,7 +109,7 @@ JSONTEST_FIXTURE( ValueTest, isObject ) } -JSONTEST_FIXTURE( ValueTest, isArray ) +JSONTEST_FIXTURE( ValueTest, arrayTypes ) { IsCheck checks; checks.isArray_ = true; @@ -118,7 +118,7 @@ JSONTEST_FIXTURE( ValueTest, isArray ) } -JSONTEST_FIXTURE( ValueTest, isNull ) +JSONTEST_FIXTURE( ValueTest, nullTypes ) { IsCheck checks; checks.isNull_ = true; @@ -128,7 +128,7 @@ JSONTEST_FIXTURE( ValueTest, isNull ) } -JSONTEST_FIXTURE( ValueTest, isString ) +JSONTEST_FIXTURE( ValueTest, stringTypes ) { IsCheck checks; checks.isString_ = true; @@ -138,7 +138,7 @@ JSONTEST_FIXTURE( ValueTest, isString ) } -JSONTEST_FIXTURE( ValueTest, isBool ) +JSONTEST_FIXTURE( ValueTest, boolTypes ) { IsCheck checks; checks.isBool_ = true; @@ -149,7 +149,7 @@ JSONTEST_FIXTURE( ValueTest, isBool ) } -JSONTEST_FIXTURE( ValueTest, isDouble ) +JSONTEST_FIXTURE( ValueTest, doubleTypes ) { IsCheck checks; checks.isDouble_ = true; @@ -158,7 +158,7 @@ JSONTEST_FIXTURE( ValueTest, isDouble ) } -JSONTEST_FIXTURE( ValueTest, isInt ) +JSONTEST_FIXTURE( ValueTest, intTypes ) { IsCheck checks; checks.isInt_ = true; @@ -168,7 +168,7 @@ JSONTEST_FIXTURE( ValueTest, isInt ) } -JSONTEST_FIXTURE( ValueTest, isUInt ) +JSONTEST_FIXTURE( ValueTest, uintTypes ) { IsCheck checks; checks.isUInt_ = true; @@ -406,15 +406,14 @@ int main( int argc, const char *argv[] ) { JsonTest::Runner runner; JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, objectTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, arrayTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, boolTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, intTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, uintTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, doubleTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, stringTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, nullTypes ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareNull ); From 2b5c59b400eb337dc029bf596c8cfd33d34eb714 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 01:03:29 +0000 Subject: [PATCH 207/268] Added some missing checks. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@207 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index fb00ccc..6154395 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -93,6 +93,8 @@ JSONTEST_FIXTURE( ValueTest, size ) JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) ); JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) ); JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(unsignedInteger_, 0) ); + JSONTEST_ASSERT_PRED( checkMemberCount(smallUnsignedInteger_, 0) ); JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) ); JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) ); JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) ); From 305ff561a4aa24b71501b0011fbaf13e92e16953 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 01:04:07 +0000 Subject: [PATCH 208/268] Greatly fleshed out numeric type tests. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@208 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 85 +++++++++++++++++++----- 1 file changed, 68 insertions(+), 17 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 6154395..38d9658 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -3,14 +3,20 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE +#include +#include + #include #include "jsontest.h" - // TODO: // - boolean value returns that they are integral. Should not be. // - unsigned integer in integer range are not considered to be valid integer. Should check range. +// Make numeric limits more convenient to talk about. 
+#define kint32max std::numeric_limits::max() +#define kint32min std::numeric_limits::min() +#define kuint32max std::numeric_limits::max() // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// @@ -151,33 +157,79 @@ JSONTEST_FIXTURE( ValueTest, boolTypes ) } -JSONTEST_FIXTURE( ValueTest, doubleTypes ) +JSONTEST_FIXTURE( ValueTest, integerTypes ) { IsCheck checks; - checks.isDouble_ = true; + + // Zero (signed constructor arg) + checks = IsCheck(); + checks.isInt_ = true; checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( real_, checks ) ); -} + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(0), checks ) ); + // Zero (unsigned constructor arg) + checks = IsCheck(); + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(0u), checks ) ); -JSONTEST_FIXTURE( ValueTest, intTypes ) -{ - IsCheck checks; + // 2^20 (signed constructor arg) + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) ); + JSONTEST_ASSERT_PRED( checkIs( Json::Value(1 << 20), checks ) ); + + // 2^20 (unsigned constructor arg) + checks = IsCheck(); + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(1U << 20), checks ) ); + + // -2^20 + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(-(1 << 20)), checks ) ); + + // int32 max + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(kint32max), checks ) ); + + // int32 min + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(kint32min), checks ) ); + + // uint32 max + checks = IsCheck(); + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(kuint32max), checks ) ); } -JSONTEST_FIXTURE( ValueTest, uintTypes ) +JSONTEST_FIXTURE( ValueTest, nonIntegerTypes ) { IsCheck checks; - checks.isUInt_ = true; + checks.isDouble_ = true; checks.isNumeric_ = true; - checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) ); - JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) ); + + // Positive number + JSONTEST_ASSERT_PRED( checkIs( Json::Value(0.1), checks ) ); + + // Negative number + JSONTEST_ASSERT_PRED( checkIs( Json::Value(-0.1), checks ) ); } @@ -411,9 +463,8 @@ int main( int argc, const char *argv[] ) JSONTEST_REGISTER_FIXTURE( runner, ValueTest, objectTypes ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, arrayTypes ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, boolTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, intTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, uintTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, doubleTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, integerTypes ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, nonIntegerTypes ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, stringTypes ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, nullTypes ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); From 55d77716d77b9a537b98187f269319e8b93bd56b Mon Sep 17 00:00:00 2001 From: 
aaronjacobs Date: Wed, 25 May 2011 01:23:08 +0000 Subject: [PATCH 209/268] Added tests for 64-bit integers. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@209 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 45 ++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 38d9658..bd36229 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -6,6 +6,7 @@ #include #include +#include #include #include "jsontest.h" @@ -17,6 +18,9 @@ #define kint32max std::numeric_limits::max() #define kint32min std::numeric_limits::min() #define kuint32max std::numeric_limits::max() +#define kint64max std::numeric_limits::max() +#define kint64min std::numeric_limits::min() +#define kuint64max std::numeric_limits::max() // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// @@ -216,6 +220,47 @@ JSONTEST_FIXTURE( ValueTest, integerTypes ) checks.isNumeric_ = true; checks.isIntegral_ = true; JSONTEST_ASSERT_PRED( checkIs( Json::Value(kuint32max), checks ) ); + +#ifdef JSON_NO_INT64 + // int64 max + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(double(kint64max)), checks ) ); + + // int64 min + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(double(kint64min)), checks ) ); + + // uint64 max + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(double(kuint64max)), checks ) ); +#else // ifdef JSON_NO_INT64 + // int64 max + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(Json::Int64(kint64max)), checks ) ); + + // int64 min + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(Json::Int64(kint64min)), checks ) ); + + // uint64 max + checks = IsCheck(); + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( Json::Value(Json::UInt64(kuint64max)), checks ) ); +#endif } From e9539ecf05107ad025ba40a8d13d98f36b10f25c Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 01:23:47 +0000 Subject: [PATCH 210/268] Fixed some whitespace. 
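The JSON_NO_INT64 branch added in PATCH 209 above has to store the 64-bit limits as doubles, which is lossy: an IEEE-754 double carries a 53-bit significand, so 2^63 - 1 cannot be represented exactly and rounds up to 2^63. A short check of that rounding (standard C++ only, assuming IEEE-754 doubles and round-to-nearest):

    #include <cstdint>
    #include <cstdio>
    #include <limits>

    int main()
    {
        const int64_t maxInt64 = std::numeric_limits<int64_t>::max(); // 2^63 - 1
        const double  rounded  = double(maxInt64);                    // nearest double is 2^63
        const double  twoTo63  = 9223372036854775808.0;

        std::printf("maxInt64         = %lld\n", (long long)maxInt64);
        std::printf("double(maxInt64) = %.1f\n", rounded);
        std::printf("rounds to 2^63?  = %d\n", rounded == twoTo63);   // prints 1
        return 0;
    }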
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@210 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index bd36229..e5bb41e 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -280,19 +280,19 @@ JSONTEST_FIXTURE( ValueTest, nonIntegerTypes ) JSONTEST_FIXTURE( ValueTest, accessArray ) { - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; - const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; } JSONTEST_FIXTURE( ValueTest, asFloat ) { - JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; + JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; } void From 2c1ff8f640edacf165606a173c30362c2a604f4b Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 01:24:23 +0000 Subject: [PATCH 211/268] Gave tests more general names in preparation for making them much more comprehensive. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@211 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 62 +++++++++++------------- 1 file changed, 27 insertions(+), 35 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index e5bb41e..5807b5f 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -95,7 +95,7 @@ struct ValueTest : JsonTest::TestCase }; -JSONTEST_FIXTURE( ValueTest, size ) +JSONTEST_FIXTURE( ValueTest, memberCount ) { JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) ); @@ -112,7 +112,7 @@ JSONTEST_FIXTURE( ValueTest, size ) } -JSONTEST_FIXTURE( ValueTest, objectTypes ) +JSONTEST_FIXTURE( ValueTest, objects ) { IsCheck checks; checks.isObject_ = true; @@ -121,16 +121,27 @@ JSONTEST_FIXTURE( ValueTest, objectTypes ) } -JSONTEST_FIXTURE( ValueTest, arrayTypes ) +JSONTEST_FIXTURE( ValueTest, arrays ) { + // Types IsCheck checks; checks.isArray_ = true; JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); + + // Access non-const array + const unsigned int index0 = 0; + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ); + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ); + + // Access const array + const Json::Value &constArray = array1_; + JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ); + JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ); } -JSONTEST_FIXTURE( ValueTest, nullTypes ) +JSONTEST_FIXTURE( ValueTest, null ) { IsCheck checks; checks.isNull_ = true; @@ -140,7 +151,7 @@ JSONTEST_FIXTURE( ValueTest, nullTypes ) } -JSONTEST_FIXTURE( ValueTest, stringTypes ) +JSONTEST_FIXTURE( ValueTest, strings ) { IsCheck checks; checks.isString_ = true; @@ -150,7 +161,7 @@ JSONTEST_FIXTURE( ValueTest, stringTypes ) } -JSONTEST_FIXTURE( ValueTest, boolTypes ) +JSONTEST_FIXTURE( ValueTest, bools ) { IsCheck checks; checks.isBool_ = true; @@ -161,7 +172,7 @@ JSONTEST_FIXTURE( ValueTest, boolTypes ) } -JSONTEST_FIXTURE( ValueTest, integerTypes ) +JSONTEST_FIXTURE( ValueTest, integers ) { IsCheck checks; @@ -264,7 +275,7 @@ JSONTEST_FIXTURE( ValueTest, integerTypes ) } -JSONTEST_FIXTURE( ValueTest, nonIntegerTypes ) +JSONTEST_FIXTURE( ValueTest, nonIntegers ) { IsCheck checks; checks.isDouble_ = true; @@ -278,23 +289,6 @@ JSONTEST_FIXTURE( ValueTest, nonIntegerTypes ) } -JSONTEST_FIXTURE( ValueTest, accessArray ) -{ - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ) << "Json::Value::operator[ArrayIndex]"; - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ) << "Json::Value::operator[int]"; - - const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ) << "Json::Value::operator[ArrayIndex] const"; - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ) << "Json::Value::operator[int] const"; -} - - -JSONTEST_FIXTURE( ValueTest, asFloat ) -{ - JSONTEST_ASSERT_EQUAL( 0.00390625f, float_.asFloat() ) << "Json::Value::asFloat()"; -} - void ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ) { @@ -504,16 +498,14 @@ ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) int main( int argc, const char *argv[] ) { JsonTest::Runner runner; - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, objectTypes ); - JSONTEST_REGISTER_FIXTURE( runner, 
ValueTest, arrayTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, boolTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, integerTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, nonIntegerTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, stringTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, nullTypes ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, accessArray ); - JSONTEST_REGISTER_FIXTURE( runner, ValueTest, asFloat ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, memberCount ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, objects ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, arrays ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, null ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, strings ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, bools ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, integers ); + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, nonIntegers ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareNull ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareInt ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareUInt ); From de67c4a6bac7fcf80bb9f45b9514cb18345e0cd9 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 01:46:50 +0000 Subject: [PATCH 212/268] Made tests more comprehensive. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@212 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 224 ++++++++++++++++++++--- 1 file changed, 201 insertions(+), 23 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 5807b5f..26638f2 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -114,30 +114,49 @@ JSONTEST_FIXTURE( ValueTest, memberCount ) JSONTEST_FIXTURE( ValueTest, objects ) { + // Types IsCheck checks; checks.isObject_ = true; JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); + + // Access through const reference + const Json::Value &constObject = object1_; + + JSONTEST_ASSERT( Json::Value(1234) == constObject["id"] ); + JSONTEST_ASSERT( Json::Value() == constObject["unknown id"] ); + + // Access through non-const reference + JSONTEST_ASSERT( Json::Value(1234) == object1_["id"] ); + JSONTEST_ASSERT( Json::Value() == object1_["unknown id"] ); + + object1_["some other id"] = "foo"; + JSONTEST_ASSERT( Json::Value("foo") == object1_["some other id"] ); } JSONTEST_FIXTURE( ValueTest, arrays ) { + const unsigned int index0 = 0; + // Types IsCheck checks; checks.isArray_ = true; JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); - // Access non-const array - const unsigned int index0 = 0; - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ); - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ); - - // Access const array + // Access through const reference const Json::Value &constArray = array1_; JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ); JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ); + + // Access through non-const reference + JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ); + JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ); + + array1_[2] = Json::Value(17); + JSONTEST_ASSERT( Json::Value() == array1_[1] ); + JSONTEST_ASSERT( Json::Value(17) == array1_[2] ); } @@ -158,6 +177,9 @@ JSONTEST_FIXTURE( ValueTest, strings ) JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); JSONTEST_ASSERT_PRED( 
checkIs( string_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); + + JSONTEST_ASSERT( std::string("a") == string1_.asString()); + JSONTEST_ASSERT( std::string("a") == string1_.asCString()); } @@ -169,108 +191,249 @@ JSONTEST_FIXTURE( ValueTest, bools ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); + + JSONTEST_ASSERT( true == true_.asBool()); + JSONTEST_ASSERT( false == false_.asBool()); } JSONTEST_FIXTURE( ValueTest, integers ) { IsCheck checks; + Json::Value val; // Zero (signed constructor arg) + val = Json::Value(0); + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(0), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 0 == val.asInt()); + JSONTEST_ASSERT( 0 == val.asLargestInt()); + JSONTEST_ASSERT( 0 == val.asUInt()); + JSONTEST_ASSERT( 0 == val.asLargestUInt()); + JSONTEST_ASSERT( 0.0 == val.asDouble()); + JSONTEST_ASSERT( 0.0 == val.asFloat()); // Zero (unsigned constructor arg) + val = Json::Value(0u); + checks = IsCheck(); checks.isUInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(0u), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 0 == val.asInt()); + JSONTEST_ASSERT( 0 == val.asLargestInt()); + JSONTEST_ASSERT( 0 == val.asUInt()); + JSONTEST_ASSERT( 0 == val.asLargestUInt()); + JSONTEST_ASSERT( 0.0 == val.asDouble()); + JSONTEST_ASSERT( 0.0 == val.asFloat()); + + // Zero (floating-point constructor arg) + val = Json::Value(0.0); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 0 == val.asInt()); + JSONTEST_ASSERT( 0 == val.asLargestInt()); + JSONTEST_ASSERT( 0 == val.asUInt()); + JSONTEST_ASSERT( 0 == val.asLargestUInt()); + JSONTEST_ASSERT( 0.0 == val.asDouble()); + JSONTEST_ASSERT( 0.0 == val.asFloat()); // 2^20 (signed constructor arg) + val = Json::Value(1 << 20); + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(1 << 20), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( (1 << 20) == val.asInt()); + JSONTEST_ASSERT( (1 << 20) == val.asLargestInt()); + JSONTEST_ASSERT( (1 << 20) == val.asUInt()); + JSONTEST_ASSERT( (1 << 20) == val.asLargestUInt()); + JSONTEST_ASSERT( (1 << 20) == val.asDouble()); + JSONTEST_ASSERT( (1 << 20) == val.asFloat()); // 2^20 (unsigned constructor arg) + val = Json::Value(1u << 20); + checks = IsCheck(); checks.isUInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(1U << 20), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( (1 << 20) == val.asInt()); + JSONTEST_ASSERT( (1 << 20) == val.asLargestInt()); + JSONTEST_ASSERT( (1 << 20) == val.asUInt()); + JSONTEST_ASSERT( (1 << 20) == val.asLargestUInt()); + JSONTEST_ASSERT( (1 << 20) == val.asDouble()); + JSONTEST_ASSERT( (1 << 20) == val.asFloat()); + + // 2^20 (floating-point constructor arg) + val = Json::Value((1 << 20) / 1.0); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( (1 << 20) == val.asInt()); + JSONTEST_ASSERT( (1 << 20) == val.asLargestInt()); + 
JSONTEST_ASSERT( (1 << 20) == val.asUInt()); + JSONTEST_ASSERT( (1 << 20) == val.asLargestUInt()); + JSONTEST_ASSERT( (1 << 20) == val.asDouble()); + JSONTEST_ASSERT( (1 << 20) == val.asFloat()); // -2^20 + val = Json::Value(-(1 << 20)); + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(-(1 << 20)), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( -(1 << 20) == val.asInt()); + JSONTEST_ASSERT( -(1 << 20) == val.asLargestInt()); + JSONTEST_ASSERT( -(1 << 20) == val.asDouble()); + JSONTEST_ASSERT( -(1 << 20) == val.asFloat()); // int32 max + val = Json::Value(kint32max); + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(kint32max), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( kint32max == val.asInt()); + JSONTEST_ASSERT( kint32max == val.asLargestInt()); + JSONTEST_ASSERT( kint32max == val.asUInt()); + JSONTEST_ASSERT( kint32max == val.asLargestUInt()); + JSONTEST_ASSERT( kint32max == val.asDouble()); + JSONTEST_ASSERT( kint32max == val.asFloat()); // int32 min + val = Json::Value(kint32min); + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(kint32min), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( kint32min == val.asInt()); + JSONTEST_ASSERT( kint32min == val.asLargestInt()); + JSONTEST_ASSERT( kint32min == val.asDouble()); + JSONTEST_ASSERT( kint32min == val.asFloat()); // uint32 max + val = Json::Value(kuint32max); + checks = IsCheck(); checks.isUInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(kuint32max), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + +#ifdef JSON_NO_INT64 + JSONTEST_ASSERT( kuint32max == val.asLargestInt()); +#endif + JSONTEST_ASSERT( kuint32max == val.asUInt()); + JSONTEST_ASSERT( kuint32max == val.asLargestUInt()); + JSONTEST_ASSERT( kuint32max == val.asDouble()); + JSONTEST_ASSERT( kuint32max == val.asFloat()); #ifdef JSON_NO_INT64 // int64 max + val = Json::Value(double(kint64max)); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(double(kint64max)), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( double(kint64max) == val.asDouble()); + JSONTEST_ASSERT( float(kint64max) == val.asFloat()); // int64 min + val = Json::Value(double(kint64min)); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(double(kint64min)), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( double(kint64min) == val.asDouble()); + JSONTEST_ASSERT( float(kint64min) == val.asFloat()); // uint64 max + val = Json::Value(double(kuint64max)); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(double(kuint64max)), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( double(kuint64max) == val.asDouble()); + JSONTEST_ASSERT( float(kuint64max) == val.asFloat()); #else // ifdef JSON_NO_INT64 // int64 max + val = Json::Value(Json::Int64(kint64max)); + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - 
JSONTEST_ASSERT_PRED( checkIs( Json::Value(Json::Int64(kint64max)), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( kint64max == val.asInt64()); + JSONTEST_ASSERT( kint64max == val.asLargestInt()); + JSONTEST_ASSERT( kint64max == val.asUInt64()); + JSONTEST_ASSERT( kint64max == val.asLargestUInt()); + JSONTEST_ASSERT( double(kint64max) == val.asDouble()); + JSONTEST_ASSERT( float(kint64max) == val.asFloat()); // int64 min + val = Json::Value(Json::Int64(kint64min)); + checks = IsCheck(); checks.isInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(Json::Int64(kint64min)), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( kint64min == val.asInt64()); + JSONTEST_ASSERT( kint64min == val.asLargestInt()); + JSONTEST_ASSERT( double(kint64min) == val.asDouble()); + JSONTEST_ASSERT( float(kint64min) == val.asFloat()); // uint64 max + val = Json::Value(Json::UInt64(kuint64max)); + checks = IsCheck(); checks.isUInt_ = true; checks.isNumeric_ = true; checks.isIntegral_ = true; - JSONTEST_ASSERT_PRED( checkIs( Json::Value(Json::UInt64(kuint64max)), checks ) ); + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( kuint64max == val.asUInt64()); + JSONTEST_ASSERT( kuint64max == val.asLargestUInt()); + JSONTEST_ASSERT( double(kuint64max) == val.asDouble()); + JSONTEST_ASSERT( float(kuint64max) == val.asFloat()); #endif } @@ -278,14 +441,29 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_FIXTURE( ValueTest, nonIntegers ) { IsCheck checks; + Json::Value val; + + // Positive number + val = Json::Value(0.25); + + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - // Positive number - JSONTEST_ASSERT_PRED( checkIs( Json::Value(0.1), checks ) ); + JSONTEST_ASSERT( 0.25 == val.asDouble()); + JSONTEST_ASSERT( 0.25 == val.asFloat()); // Negative number - JSONTEST_ASSERT_PRED( checkIs( Json::Value(-0.1), checks ) ); + val = Json::Value(-0.25); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( -0.25 == val.asDouble()); + JSONTEST_ASSERT( -0.25 == val.asFloat()); } From b46d5ff4dad6332fe30d85bc52c6f00334506ff5 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 01:51:30 +0000 Subject: [PATCH 213/268] Fixed a test bug. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@213 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 26638f2..144919b 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -349,7 +349,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isIntegral_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); -#ifdef JSON_NO_INT64 +#ifndef JSON_NO_INT64 JSONTEST_ASSERT( kuint32max == val.asLargestInt()); #endif JSONTEST_ASSERT( kuint32max == val.asUInt()); From 65698b3b228a932bfff5b3739de7bdd2da5854ad Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 02:54:11 +0000 Subject: [PATCH 214/268] Added tests for default numeric values. 
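The conversion tests fleshed out in PATCH 212 above double as usage documentation: a numeric Json::Value can be read back through any of the as* accessors as long as the stored number fits the requested type. A small example using only calls exercised by those tests, built against the library's json/json.h umbrella header:

    #include <json/json.h>
    #include <cassert>

    int main()
    {
        Json::Value v(1 << 20); // stored as a signed integer

        // The same value is reachable through the other numeric accessors,
        // exactly as the PATCH 212 assertions check.
        assert(v.asInt()        == (1 << 20));
        assert(v.asUInt()       == (1u << 20));
        assert(v.asLargestInt() == (1 << 20));
        assert(v.asDouble()     == double(1 << 20));
        assert(v.isIntegral());
        return 0;
    }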
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@214 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 53 ++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 144919b..37393c8 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -202,6 +202,42 @@ JSONTEST_FIXTURE( ValueTest, integers ) IsCheck checks; Json::Value val; + // Default int + val = Json::Value(Json::intValue); + + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 0 == val.asInt()); + JSONTEST_ASSERT( 0 == val.asLargestInt()); + JSONTEST_ASSERT( 0 == val.asUInt()); + JSONTEST_ASSERT( 0 == val.asLargestUInt()); + JSONTEST_ASSERT( 0.0 == val.asDouble()); + JSONTEST_ASSERT( 0.0 == val.asFloat()); + + // Default uint + val = Json::Value(Json::uintValue); + + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); + + checks = IsCheck(); + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 0 == val.asInt()); + JSONTEST_ASSERT( 0 == val.asLargestInt()); + JSONTEST_ASSERT( 0 == val.asUInt()); + JSONTEST_ASSERT( 0 == val.asLargestUInt()); + JSONTEST_ASSERT( 0.0 == val.asDouble()); + JSONTEST_ASSERT( 0.0 == val.asFloat()); + // Zero (signed constructor arg) val = Json::Value(0); @@ -443,6 +479,23 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) IsCheck checks; Json::Value val; + // Default real + val = Json::Value(Json::realValue); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 0 == val.asInt()); + JSONTEST_ASSERT( 0 == val.asLargestInt()); + JSONTEST_ASSERT( 0 == val.asUInt()); + JSONTEST_ASSERT( 0 == val.asLargestUInt()); + JSONTEST_ASSERT( 0.0 == val.asDouble()); + JSONTEST_ASSERT( 0.0 == val.asFloat()); + // Positive number val = Json::Value(0.25); From 4f99067c9916040db1b6686834bb76c358e3f289 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 03:16:49 +0000 Subject: [PATCH 215/268] Fixed bugs in asInt64 and asUInt64. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@215 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 4 +- trunk/jsoncpp/src/test_lib_json/main.cpp | 61 +++++++++++++++++++++++ 2 files changed, 63 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 7c450ba..0819f3e 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -766,7 +766,7 @@ Value::asInt64() const return value_.uint_; case realValue: JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); - return Int( value_.real_ ); + return Int64( value_.real_ ); case booleanValue: return value_.bool_ ? 1 : 0; case stringValue: @@ -794,7 +794,7 @@ Value::asUInt64() const return value_.uint_; case realValue: JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); - return UInt( value_.real_ ); + return UInt64( value_.real_ ); case booleanValue: return value_.bool_ ? 
1 : 0; case stringValue: diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 37393c8..07b1495 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -427,6 +427,67 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT( double(kuint64max) == val.asDouble()); JSONTEST_ASSERT( float(kuint64max) == val.asFloat()); #else // ifdef JSON_NO_INT64 + // 2^40 (signed constructor arg) + val = Json::Value(1LL << 40); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( (1LL << 40) == val.asInt64()); + JSONTEST_ASSERT( (1LL << 40) == val.asLargestInt()); + JSONTEST_ASSERT( (1LL << 40) == val.asUInt64()); + JSONTEST_ASSERT( (1LL << 40) == val.asLargestUInt()); + JSONTEST_ASSERT( (1LL << 40) == val.asDouble()); + JSONTEST_ASSERT( (1LL << 40) == val.asFloat()); + + // 2^40 (unsigned constructor arg) + val = Json::Value(1ULL << 40); + + checks = IsCheck(); + checks.isUInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( (1LL << 40) == val.asInt64()); + JSONTEST_ASSERT( (1LL << 40) == val.asLargestInt()); + JSONTEST_ASSERT( (1LL << 40) == val.asUInt64()); + JSONTEST_ASSERT( (1LL << 40) == val.asLargestUInt()); + JSONTEST_ASSERT( (1LL << 40) == val.asDouble()); + JSONTEST_ASSERT( (1LL << 40) == val.asFloat()); + + // 2^40 (floating-point constructor arg) + val = Json::Value((1LL << 40) / 1.0); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( (1LL << 40) == val.asInt64()); + JSONTEST_ASSERT( (1LL << 40) == val.asLargestInt()); + JSONTEST_ASSERT( (1LL << 40) == val.asUInt64()); + JSONTEST_ASSERT( (1LL << 40) == val.asLargestUInt()); + JSONTEST_ASSERT( (1LL << 40) == val.asDouble()); + JSONTEST_ASSERT( (1LL << 40) == val.asFloat()); + + // -2^40 + val = Json::Value(-(1LL << 40)); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isNumeric_ = true; + checks.isIntegral_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( -(1LL << 40) == val.asInt64()); + JSONTEST_ASSERT( -(1LL << 40) == val.asLargestInt()); + JSONTEST_ASSERT( -(1LL << 40) == val.asDouble()); + JSONTEST_ASSERT( -(1LL << 40) == val.asFloat()); + // int64 max val = Json::Value(Json::Int64(kint64max)); From 6acdced74937f78b3453ad18e2ee7ce62f5eaf95 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 04:19:17 +0000 Subject: [PATCH 216/268] Gave a more consistent behavior to the Value::isFoo methods. See NEWS.txt for more details. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@216 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 20 ++ trunk/jsoncpp/include/json/value.h | 2 + trunk/jsoncpp/src/lib_json/json_value.cpp | 196 +++++++++------- trunk/jsoncpp/src/test_lib_json/main.cpp | 265 ++++++++++++++++++---- 4 files changed, 357 insertions(+), 126 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index e53b880..ac7c856 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -1,3 +1,23 @@ + New in SVN: + ----------- + +* Value + + - Updated the Value::isFoo methods to work as follows: + + * isInt, isInt64, isUInt, and isUInt64 return true if and only if the + value can be exactly representable as that type. 
In particular, a value + constructed with a double like 17.0 will now return true for all of + these methods. + + * isDouble and isFloat now return true for all numeric values, since all + numeric values can be converted to a double or float without + truncation. Note that the conversion may not be exact -- for example, + doubles cannot exactly represent integers above 2^53. + + * isBool, isNull, isString, isArray, and isObject now return true if and + only if the value is of that type. + New in JsonCpp 0.6.0: --------------------- diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index e3869e5..b013c9b 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -280,7 +280,9 @@ namespace Json { bool isNull() const; bool isBool() const; bool isInt() const; + bool isInt64() const; bool isUInt() const; + bool isUInt64() const; bool isIntegral() const; bool isDouble() const; bool isNumeric() const; diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 0819f3e..35ec41d 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -11,6 +11,7 @@ # include "json_batchallocator.h" # endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR #endif // if !defined(JSON_IS_AMALGAMATION) +#include #include #include #include @@ -539,6 +540,7 @@ Value::compare( const Value &other ) const } +// TODO(jacobsa): Check this for correctness given the new type-coallescing API. bool Value::operator <( const Value &other ) const { @@ -601,6 +603,7 @@ Value::operator >( const Value &other ) const return other < *this; } +// TODO(jacobsa): Check this for correctness given the new type-coallescing API. bool Value::operator ==( const Value &other ) const { @@ -694,59 +697,38 @@ Value::asConstString() const Value::Int Value::asInt() const { + JSON_ASSERT_MESSAGE(isInt(), "Value is not convertible to Int"); switch ( type_ ) { - case nullValue: - return 0; case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= minInt && value_.int_ <= maxInt, "unsigned integer out of signed int range" ); return Int(value_.int_); case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt(maxInt), "unsigned integer out of signed int range" ); return Int(value_.uint_); case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt && value_.real_ <= maxInt, "Real out of signed integer range" ); return Int( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to int" ); default: - JSON_ASSERT_UNREACHABLE; + break; } - return 0; // unreachable; + JSON_ASSERT_UNREACHABLE; + return 0; } Value::UInt Value::asUInt() const { + JSON_ASSERT_MESSAGE(isUInt(), "Value is not convertible to UInt"); switch ( type_ ) { - case nullValue: - return 0; case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to unsigned integer" ); - JSON_ASSERT_MESSAGE( UInt(value_.int_) <= maxUInt, "signed integer out of UInt range" ); return UInt(value_.int_); case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= maxUInt, "unsigned integer out of UInt range" ); return UInt(value_.uint_); case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt, "Real out of unsigned integer range" ); return UInt( value_.real_ ); - case booleanValue: - return value_.bool_ ? 
1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to uint" ); - default: - JSON_ASSERT_UNREACHABLE; } - return 0; // unreachable; + JSON_ASSERT_UNREACHABLE; + return 0; } @@ -755,55 +737,40 @@ Value::asUInt() const Value::Int64 Value::asInt64() const { + JSON_ASSERT_MESSAGE(isInt64(), "Value is not convertible to Int64"); switch ( type_ ) { - case nullValue: - return 0; case intValue: - return value_.int_; + return Int64(value_.int_); case uintValue: - JSON_ASSERT_MESSAGE( value_.uint_ <= UInt64(maxInt64), "unsigned integer out of Int64 range" ); - return value_.uint_; + return Int64(value_.uint_); case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= minInt64 && value_.real_ <= maxInt64, "Real out of Int64 range" ); return Int64( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to Int64" ); default: - JSON_ASSERT_UNREACHABLE; + break; } - return 0; // unreachable; + JSON_ASSERT_UNREACHABLE; + return 0; } Value::UInt64 Value::asUInt64() const { + JSON_ASSERT_MESSAGE(isUInt64(), "Value is not convertible to UInt64"); switch ( type_ ) { - case nullValue: - return 0; case intValue: - JSON_ASSERT_MESSAGE( value_.int_ >= 0, "Negative integer can not be converted to UInt64" ); - return value_.int_; + return UInt64(value_.int_); case uintValue: - return value_.uint_; + return UInt64(value_.uint_); case realValue: - JSON_ASSERT_MESSAGE( value_.real_ >= 0 && value_.real_ <= maxUInt64, "Real out of UInt64 range" ); return UInt64( value_.real_ ); - case booleanValue: - return value_.bool_ ? 1 : 0; - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to UInt64" ); default: - JSON_ASSERT_UNREACHABLE; + break; } + JSON_ASSERT_UNREACHABLE; + return 0; return 0; // unreachable; } # endif // if defined(JSON_HAS_INT64) @@ -836,8 +803,6 @@ Value::asDouble() const { switch ( type_ ) { - case nullValue: - return 0.0; case intValue: return static_cast( value_.int_ ); case uintValue: @@ -848,12 +813,12 @@ Value::asDouble() const #endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) case realValue: return value_.real_; + case nullValue: case booleanValue: - return value_.bool_ ? 1.0 : 0.0; case stringValue: case arrayValue: case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to double" ); + JSON_FAIL_MESSAGE( "Value is not a double" ); default: JSON_ASSERT_UNREACHABLE; } @@ -865,8 +830,6 @@ Value::asFloat() const { switch ( type_ ) { - case nullValue: - return 0.0f; case intValue: return static_cast( value_.int_ ); case uintValue: @@ -877,12 +840,12 @@ Value::asFloat() const #endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) case realValue: return static_cast( value_.real_ ); + case nullValue: case booleanValue: - return value_.bool_ ? 
1.0f : 0.0f; case stringValue: case arrayValue: case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to float" ); + JSON_FAIL_MESSAGE( "Value is not a float" ); default: JSON_ASSERT_UNREACHABLE; } @@ -894,20 +857,16 @@ Value::asBool() const { switch ( type_ ) { + case booleanValue: + return value_.bool_; case nullValue: - return false; case intValue: case uintValue: - return value_.int_ != 0; case realValue: - return value_.real_ != 0.0; - case booleanValue: - return value_.bool_; case stringValue: - return value_.string_ && value_.string_[0] != 0; case arrayValue: case objectValue: - return value_.map_->size() != 0; + JSON_FAIL_MESSAGE( "Value is not a bool" ); default: JSON_ASSERT_UNREACHABLE; } @@ -1366,6 +1325,11 @@ Value::getMemberNames() const // //# endif +static bool IsIntegral(double d) { + double integral_part; + return modf(d, &integral_part) == 0.0; +} + bool Value::isNull() const @@ -1384,30 +1348,106 @@ Value::isBool() const bool Value::isInt() const { - return type_ == intValue; + switch ( type_ ) + { + case intValue: + return value_.int_ >= minInt && value_.int_ <= maxInt; + case uintValue: + return value_.uint_ <= UInt(maxInt); + case realValue: + return value_.real_ >= minInt && + value_.real_ <= maxInt && + IsIntegral(value_.real_); + default: + break; + } + return false; } bool Value::isUInt() const { - return type_ == uintValue; + switch ( type_ ) + { + case intValue: + return value_.int_ >= 0 && value_.int_ <= maxUInt; + case uintValue: + return value_.uint_ <= maxUInt; + case realValue: + return value_.real_ >= 0 && + value_.real_ <= maxUInt && + IsIntegral(value_.real_); + default: + break; + } + return false; +} + +bool +Value::isInt64() const +{ +# if defined(JSON_HAS_INT64) + switch ( type_ ) + { + case intValue: + return true; + case uintValue: + return value_.uint_ <= UInt64(maxInt64); + case realValue: + // Note that maxInt64 (= 2^63 - 1) is not exactly representable as a + // double, so double(maxInt64) will be rounded up to 2^63. Therefore we + // require the value to be strictly less than the limit. + return value_.real_ >= double(minInt64) && + value_.real_ < double(maxInt64) && + IsIntegral(value_.real_); + default: + break; + } +# endif // JSON_HAS_INT64 + return false; +} + +bool +Value::isUInt64() const +{ +# if defined(JSON_HAS_INT64) + switch ( type_ ) + { + case intValue: + return value_.int_ >= 0; + case uintValue: + return true; + case realValue: + // Note that maxUInt64 (= 2^64 - 1) is not exactly representable as a + // double, so double(maxUInt64) will be rounded up to 2^64. Therefore we + // require the value to be strictly less than the limit. 
+ return value_.real_ >= 0 && + value_.real_ < double(maxUInt64) && + IsIntegral(value_.real_); + default: + break; + } +# endif // JSON_HAS_INT64 + return false; } bool Value::isIntegral() const { - return type_ == intValue - || type_ == uintValue - || type_ == booleanValue; +#if defined(JSON_HAS_INT64) + return isInt64() || isUInt64(); +#else + return isInt() || isUInt(); +#endif } bool Value::isDouble() const { - return type_ == realValue; + return type_ == realValue || isIntegral(); } @@ -1428,14 +1468,14 @@ Value::isString() const bool Value::isArray() const { - return type_ == nullValue || type_ == arrayValue; + return type_ == arrayValue; } bool Value::isObject() const { - return type_ == nullValue || type_ == objectValue; + return type_ == objectValue; } diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 07b1495..0684c90 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -74,13 +74,16 @@ struct ValueTest : JsonTest::TestCase bool isObject_; bool isArray_; bool isBool_; - bool isDouble_; + bool isString_; + bool isNull_; + bool isInt_; + bool isInt64_; bool isUInt_; + bool isUInt64_; bool isIntegral_; + bool isDouble_; bool isNumeric_; - bool isString_; - bool isNull_; }; void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount ); @@ -120,6 +123,8 @@ JSONTEST_FIXTURE( ValueTest, objects ) JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); + JSONTEST_ASSERT_EQUAL( Json::objectValue, emptyObject_.type()); + // Access through const reference const Json::Value &constObject = object1_; @@ -145,6 +150,8 @@ JSONTEST_FIXTURE( ValueTest, arrays ) JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); + JSONTEST_ASSERT_EQUAL( Json::arrayValue, array1_.type()); + // Access through const reference const Json::Value &constArray = array1_; JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ); @@ -162,16 +169,18 @@ JSONTEST_FIXTURE( ValueTest, arrays ) JSONTEST_FIXTURE( ValueTest, null ) { + JSONTEST_ASSERT_EQUAL( Json::nullValue, null_.type()); + IsCheck checks; checks.isNull_ = true; - checks.isObject_ = true; - checks.isArray_ = true; JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); } JSONTEST_FIXTURE( ValueTest, strings ) { + JSONTEST_ASSERT_EQUAL( Json::stringValue, string1_.type()); + IsCheck checks; checks.isString_ = true; JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) ); @@ -185,10 +194,10 @@ JSONTEST_FIXTURE( ValueTest, strings ) JSONTEST_FIXTURE( ValueTest, bools ) { + JSONTEST_ASSERT_EQUAL( Json::booleanValue, false_.type()); + IsCheck checks; checks.isBool_ = true; - checks.isIntegral_ = true; - checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); @@ -209,8 +218,12 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks = IsCheck(); checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( 0 == val.asInt()); @@ -226,9 +239,35 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; checks.isUInt_ = true; + checks.isUInt64_ = true; + 
checks.isIntegral_ = true; + checks.isDouble_ = true; checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 0 == val.asInt()); + JSONTEST_ASSERT( 0 == val.asLargestInt()); + JSONTEST_ASSERT( 0 == val.asUInt()); + JSONTEST_ASSERT( 0 == val.asLargestUInt()); + JSONTEST_ASSERT( 0.0 == val.asDouble()); + JSONTEST_ASSERT( 0.0 == val.asFloat()); + + // Default real + val = Json::Value(Json::realValue); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( 0 == val.asInt()); @@ -241,10 +280,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) // Zero (signed constructor arg) val = Json::Value(0); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( 0 == val.asInt()); @@ -257,10 +302,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) // Zero (unsigned constructor arg) val = Json::Value(0u); + JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; checks.isUInt_ = true; - checks.isNumeric_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( 0 == val.asInt()); @@ -273,7 +324,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) // Zero (floating-point constructor arg) val = Json::Value(0.0); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); @@ -288,10 +346,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) // 2^20 (signed constructor arg) val = Json::Value(1 << 20); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( (1 << 20) == val.asInt()); @@ -304,10 +368,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) // 2^20 (unsigned constructor arg) val = Json::Value(1u << 20); + JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; checks.isUInt_ = true; - checks.isNumeric_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( (1 << 20) == val.asInt()); @@ -320,7 +390,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) // 2^20 (floating-point constructor arg) val = Json::Value((1 << 20) / 1.0); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); + checks.isInt_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; 
checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); @@ -335,10 +412,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) // -2^20 val = Json::Value(-(1 << 20)); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( -(1 << 20) == val.asInt()); @@ -349,10 +430,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) // int32 max val = Json::Value(kint32max); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; + checks.isUInt_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( kint32max == val.asInt()); @@ -365,10 +452,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) // int32 min val = Json::Value(kint32min); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( kint32min == val.asInt()); @@ -379,10 +470,15 @@ JSONTEST_FIXTURE( ValueTest, integers ) // uint32 max val = Json::Value(kuint32max); + JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + checks = IsCheck(); + checks.isInt64_ = true; checks.isUInt_ = true; - checks.isNumeric_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); #ifndef JSON_NO_INT64 @@ -397,6 +493,8 @@ JSONTEST_FIXTURE( ValueTest, integers ) // int64 max val = Json::Value(double(kint64max)); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; @@ -408,6 +506,8 @@ JSONTEST_FIXTURE( ValueTest, integers ) // int64 min val = Json::Value(double(kint64min)); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; @@ -419,6 +519,8 @@ JSONTEST_FIXTURE( ValueTest, integers ) // uint64 max val = Json::Value(double(kuint64max)); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; @@ -430,10 +532,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) // 2^40 (signed constructor arg) val = Json::Value(1LL << 40); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); - checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( (1LL << 40) == val.asInt64()); @@ -446,10 +552,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) // 2^40 (unsigned constructor arg) val = Json::Value(1ULL << 40); + JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + checks = IsCheck(); - checks.isUInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( (1LL << 40) 
== val.asInt64()); @@ -462,7 +572,12 @@ JSONTEST_FIXTURE( ValueTest, integers ) // 2^40 (floating-point constructor arg) val = Json::Value((1LL << 40) / 1.0); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); + checks.isInt64_ = true; + checks.isUInt64_ = true; + checks.isIntegral_ = true; checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); @@ -477,10 +592,13 @@ JSONTEST_FIXTURE( ValueTest, integers ) // -2^40 val = Json::Value(-(1LL << 40)); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); - checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( -(1LL << 40) == val.asInt64()); @@ -491,10 +609,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) // int64 max val = Json::Value(Json::Int64(kint64max)); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); - checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( kint64max == val.asInt64()); @@ -504,13 +626,34 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT( double(kint64max) == val.asDouble()); JSONTEST_ASSERT( float(kint64max) == val.asFloat()); + // int64 max (floating point constructor). Note that kint64max is not exactly + // representable as a double, and will be rounded up to be higher. + val = Json::Value(double(kint64max)); + + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + + checks = IsCheck(); + checks.isUInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( 9223372036854775808ULL == val.asUInt64()); + JSONTEST_ASSERT( 9223372036854775808ULL == val.asLargestUInt()); + JSONTEST_ASSERT( 9223372036854775808ULL == val.asDouble()); + JSONTEST_ASSERT( 9223372036854775808ULL == val.asFloat()); + // int64 min val = Json::Value(Json::Int64(kint64min)); + JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + checks = IsCheck(); - checks.isInt_ = true; - checks.isNumeric_ = true; + checks.isInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( kint64min == val.asInt64()); @@ -518,48 +661,68 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT( double(kint64min) == val.asDouble()); JSONTEST_ASSERT( float(kint64min) == val.asFloat()); + // int64 min (floating point constructor). Note that kint64min *is* exactly + // representable as a double. 
+ val = Json::Value(double(kint64min)); + + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + + checks = IsCheck(); + checks.isInt64_ = true; + checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT( -9223372036854775808LL == val.asInt64()); + JSONTEST_ASSERT( -9223372036854775808LL == val.asLargestInt()); + JSONTEST_ASSERT( -9223372036854775808.0 == val.asDouble()); + JSONTEST_ASSERT( -9223372036854775808.0 == val.asFloat()); + // uint64 max val = Json::Value(Json::UInt64(kuint64max)); + JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + checks = IsCheck(); - checks.isUInt_ = true; - checks.isNumeric_ = true; + checks.isUInt64_ = true; checks.isIntegral_ = true; + checks.isDouble_ = true; + checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); JSONTEST_ASSERT( kuint64max == val.asUInt64()); JSONTEST_ASSERT( kuint64max == val.asLargestUInt()); JSONTEST_ASSERT( double(kuint64max) == val.asDouble()); JSONTEST_ASSERT( float(kuint64max) == val.asFloat()); -#endif -} - -JSONTEST_FIXTURE( ValueTest, nonIntegers ) -{ - IsCheck checks; - Json::Value val; - - // Default real - val = Json::Value(Json::realValue); + // uint64 max (floating point constructor). Note that kuint64max is not + // exactly representable as a double, and will be rounded up to be higher. + val = Json::Value(double(kuint64max)); - JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0 == val.asInt()); - JSONTEST_ASSERT( 0 == val.asLargestInt()); - JSONTEST_ASSERT( 0 == val.asUInt()); - JSONTEST_ASSERT( 0 == val.asLargestUInt()); - JSONTEST_ASSERT( 0.0 == val.asDouble()); - JSONTEST_ASSERT( 0.0 == val.asFloat()); + JSONTEST_ASSERT( 18446744073709551616.0 == val.asDouble()); + JSONTEST_ASSERT( 18446744073709551616.0 == val.asFloat()); +#endif +} + + +JSONTEST_FIXTURE( ValueTest, nonIntegers ) +{ + IsCheck checks; + Json::Value val; // Positive number val = Json::Value(0.25); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; @@ -571,6 +734,8 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) // Negative number val = Json::Value(-0.25); + JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; @@ -614,13 +779,15 @@ ValueTest::IsCheck::IsCheck() : isObject_( false ) , isArray_( false ) , isBool_( false ) - , isDouble_( false ) + , isString_( false ) + , isNull_( false ) , isInt_( false ) + , isInt64_( false ) , isUInt_( false ) + , isUInt64_( false ) , isIntegral_( false ) + , isDouble_( false ) , isNumeric_( false ) - , isString_( false ) - , isNull_( false ) { } @@ -633,7 +800,9 @@ ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL( check.isInt64_, value.isInt64() ); JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL( check.isUInt64_, value.isUInt64() ); JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); From 
e0e5ee0ed4828cb7741a9f9dcbb8e8252df95289 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 04:34:57 +0000 Subject: [PATCH 217/268] Fixed a compilation warning/error. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@217 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 35ec41d..b279953 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -726,6 +726,8 @@ Value::asUInt() const return UInt(value_.uint_); case realValue: return UInt( value_.real_ ); + default: + break; } JSON_ASSERT_UNREACHABLE; return 0; From 4851a456cc9736cd48c8cba61b114629eb5c4b6c Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 04:59:57 +0000 Subject: [PATCH 218/268] Added line breaks to make error messages easier to read. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@218 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/assertions.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/include/json/assertions.h b/trunk/jsoncpp/include/json/assertions.h index eb87010..5a27d5f 100644 --- a/trunk/jsoncpp/include/json/assertions.h +++ b/trunk/jsoncpp/include/json/assertions.h @@ -18,7 +18,7 @@ #define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); #else // defined(JSON_USE_EXCEPTION) #define JSON_ASSERT( condition ) assert( condition ); -#define JSON_FAIL_MESSAGE( message ) { std::cerr << message; exit(123); } +#define JSON_FAIL_MESSAGE( message ) { std::cerr << std::endl << message << std::endl; exit(123); } #endif #define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) { JSON_FAIL_MESSAGE( message ) } From f80486447fa469b7e5054e87c365ffee43df4947 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 05:50:13 +0000 Subject: [PATCH 219/268] Fixed a 'comparison between signed and unsigned' error. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@219 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index b279953..cf83aa7 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1373,7 +1373,7 @@ Value::isUInt() const switch ( type_ ) { case intValue: - return value_.int_ >= 0 && value_.int_ <= maxUInt; + return value_.int_ >= 0 && LargestUInt(value_.int_) <= LargestUInt(maxUInt); case uintValue: return value_.uint_ <= maxUInt; case realValue: From 6952242cfa763c938faa6d72aee11385620c88fe Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 05:53:59 +0000 Subject: [PATCH 220/268] Fixed test failures with 64-bit support disabled. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@220 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 0684c90..61a4a93 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -800,13 +800,19 @@ ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isInt64_, value.isInt64() ); JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt64_, value.isUInt64() ); JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); + +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL( check.isInt64_, value.isInt64() ); + JSONTEST_ASSERT_EQUAL( check.isUInt64_, value.isUInt64() ); +#else + JSONTEST_ASSERT_EQUAL( false, value.isInt64() ); + JSONTEST_ASSERT_EQUAL( false, value.isUInt64() ); +#endif } From c7b80daebc4fd8897b965aaf88c7c3a7eab29a1a Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 25 May 2011 23:26:58 +0000 Subject: [PATCH 221/268] Removed some out of date TODOs. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@221 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 2 -- trunk/jsoncpp/src/test_lib_json/main.cpp | 4 ---- 2 files changed, 6 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index cf83aa7..aed7542 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -540,7 +540,6 @@ Value::compare( const Value &other ) const } -// TODO(jacobsa): Check this for correctness given the new type-coallescing API. bool Value::operator <( const Value &other ) const { @@ -603,7 +602,6 @@ Value::operator >( const Value &other ) const return other < *this; } -// TODO(jacobsa): Check this for correctness given the new type-coallescing API. bool Value::operator ==( const Value &other ) const { diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 61a4a93..1c7f242 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -10,10 +10,6 @@ #include #include "jsontest.h" -// TODO: -// - boolean value returns that they are integral. Should not be. -// - unsigned integer in integer range are not considered to be valid integer. Should check range. - // Make numeric limits more convenient to talk about. #define kint32max std::numeric_limits::max() #define kint32min std::numeric_limits::min() From 01770aad1325245e891f5fffc845f7877c7e2f54 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 00:12:48 +0000 Subject: [PATCH 222/268] Made jsontest work with 64-bit integers, and fixed an error. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@222 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 19 +++++++++++++++++++ trunk/jsoncpp/src/test_lib_json/jsontest.h | 5 +++++ 2 files changed, 24 insertions(+) diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp index 02e7b21..13d1e35 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -275,6 +275,25 @@ TestResult::operator << ( unsigned int value ) } +#ifdef JSON_HAS_INT64 +TestResult & +TestResult::operator << ( Json::Int64 value ) +{ + char buffer[32]; + sprintf( buffer, "%lld", value ); + return addToLastFailure( buffer ); +} + +TestResult & +TestResult::operator << ( Json::UInt64 value ) +{ + char buffer[32]; + sprintf( buffer, "%ull", value ); + return addToLastFailure( buffer ); +} +#endif + + TestResult & TestResult::operator << ( double value ) { diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 0d07238..5d0eb1a 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -87,6 +87,10 @@ namespace JsonTest { TestResult &operator << ( bool value ); TestResult &operator << ( int value ); TestResult &operator << ( unsigned int value ); +#ifdef JSON_HAS_INT64 + TestResult &operator << ( Json::Int64 value ); + TestResult &operator << ( Json::UInt64 value ); +#endif TestResult &operator << ( double value ); TestResult &operator << ( const char *value ); TestResult &operator << ( const std::string &value ); @@ -229,6 +233,7 @@ namespace JsonTest { #define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \ JsonTest::checkStringEqual( *result_, \ std::string(expected), std::string(actual), \ + __FILE__, __LINE__, \ #expected " == " #actual ) /// \brief Begin a fixture test case. From f8c84c1aca9b9b7b551d1e52c478939278e75fc3 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 00:30:39 +0000 Subject: [PATCH 223/268] Made the unit test's output more readable, adding to jsontest's capabilities (and simplifying its implementation) in the process. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@223 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 68 ---- trunk/jsoncpp/src/test_lib_json/jsontest.h | 28 +- trunk/jsoncpp/src/test_lib_json/main.cpp | 378 +++++++++---------- 3 files changed, 205 insertions(+), 269 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp index 13d1e35..3dda9f1 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -250,74 +250,6 @@ TestResult::addToLastFailure( const std::string &message ) } -TestResult & -TestResult::operator << ( bool value ) -{ - return addToLastFailure( value ? 
"true" : "false" ); -} - - -TestResult & -TestResult::operator << ( int value ) -{ - char buffer[32]; - sprintf( buffer, "%d", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( unsigned int value ) -{ - char buffer[32]; - sprintf( buffer, "%u", value ); - return addToLastFailure( buffer ); -} - - -#ifdef JSON_HAS_INT64 -TestResult & -TestResult::operator << ( Json::Int64 value ) -{ - char buffer[32]; - sprintf( buffer, "%lld", value ); - return addToLastFailure( buffer ); -} - -TestResult & -TestResult::operator << ( Json::UInt64 value ) -{ - char buffer[32]; - sprintf( buffer, "%ull", value ); - return addToLastFailure( buffer ); -} -#endif - - -TestResult & -TestResult::operator << ( double value ) -{ - char buffer[32]; - sprintf( buffer, "%16g", value ); - return addToLastFailure( buffer ); -} - - -TestResult & -TestResult::operator << ( const char *value ) -{ - return addToLastFailure( value ? value - : "" ); -} - - -TestResult & -TestResult::operator << ( const std::string &value ) -{ - return addToLastFailure( value ); -} - - // class TestCase // ////////////////////////////////////////////////////////////////// diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 5d0eb1a..28792a9 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -7,8 +7,10 @@ # define JSONTEST_H_INCLUDED # include +# include # include # include +# include # include // ////////////////////////////////////////////////////////////////// @@ -84,16 +86,18 @@ namespace JsonTest { void printFailure( bool printTestName ) const; - TestResult &operator << ( bool value ); - TestResult &operator << ( int value ); - TestResult &operator << ( unsigned int value ); -#ifdef JSON_HAS_INT64 - TestResult &operator << ( Json::Int64 value ); - TestResult &operator << ( Json::UInt64 value ); -#endif - TestResult &operator << ( double value ); - TestResult &operator << ( const char *value ); - TestResult &operator << ( const std::string &value ); + // Generic operator that will work with anything ostream can deal with. + template + TestResult &operator << ( const T& value ) { + std::ostringstream oss; + oss << value; + return addToLastFailure(oss.str()); + } + + // Specialized versions. + TestResult &operator << ( bool value ) { + return addToLastFailure(value ? 
"true" : "false"); + } private: TestResult &addToLastFailure( const std::string &message ); @@ -177,9 +181,9 @@ namespace JsonTest { Factories tests_; }; - template + template TestResult & - checkEqual( TestResult &result, const T &expected, const T &actual, + checkEqual( TestResult &result, const T &expected, const U &actual, const char *file, unsigned int line, const char *expr ) { if ( expected != actual ) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 1c7f242..6dc4782 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -119,20 +119,20 @@ JSONTEST_FIXTURE( ValueTest, objects ) JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) ); - JSONTEST_ASSERT_EQUAL( Json::objectValue, emptyObject_.type()); + JSONTEST_ASSERT_EQUAL(Json::objectValue, emptyObject_.type()); // Access through const reference const Json::Value &constObject = object1_; - JSONTEST_ASSERT( Json::Value(1234) == constObject["id"] ); - JSONTEST_ASSERT( Json::Value() == constObject["unknown id"] ); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), constObject["id"]); + JSONTEST_ASSERT_EQUAL(Json::Value(), constObject["unknown id"]); // Access through non-const reference - JSONTEST_ASSERT( Json::Value(1234) == object1_["id"] ); - JSONTEST_ASSERT( Json::Value() == object1_["unknown id"] ); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), object1_["id"]); + JSONTEST_ASSERT_EQUAL(Json::Value(), object1_["unknown id"]); object1_["some other id"] = "foo"; - JSONTEST_ASSERT( Json::Value("foo") == object1_["some other id"] ); + JSONTEST_ASSERT_EQUAL(Json::Value("foo"), object1_["some other id"]); } @@ -146,26 +146,26 @@ JSONTEST_FIXTURE( ValueTest, arrays ) JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) ); - JSONTEST_ASSERT_EQUAL( Json::arrayValue, array1_.type()); + JSONTEST_ASSERT_EQUAL(Json::arrayValue, array1_.type()); // Access through const reference const Json::Value &constArray = array1_; - JSONTEST_ASSERT( Json::Value(1234) == constArray[index0] ); - JSONTEST_ASSERT( Json::Value(1234) == constArray[0] ); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[index0]); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[0]); // Access through non-const reference - JSONTEST_ASSERT( Json::Value(1234) == array1_[index0] ); - JSONTEST_ASSERT( Json::Value(1234) == array1_[0] ); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[index0]); + JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[0]); array1_[2] = Json::Value(17); - JSONTEST_ASSERT( Json::Value() == array1_[1] ); - JSONTEST_ASSERT( Json::Value(17) == array1_[2] ); + JSONTEST_ASSERT_EQUAL(Json::Value(), array1_[1]); + JSONTEST_ASSERT_EQUAL(Json::Value(17), array1_[2]); } JSONTEST_FIXTURE( ValueTest, null ) { - JSONTEST_ASSERT_EQUAL( Json::nullValue, null_.type()); + JSONTEST_ASSERT_EQUAL(Json::nullValue, null_.type()); IsCheck checks; checks.isNull_ = true; @@ -175,7 +175,7 @@ JSONTEST_FIXTURE( ValueTest, null ) JSONTEST_FIXTURE( ValueTest, strings ) { - JSONTEST_ASSERT_EQUAL( Json::stringValue, string1_.type()); + JSONTEST_ASSERT_EQUAL(Json::stringValue, string1_.type()); IsCheck checks; checks.isString_ = true; @@ -183,22 +183,22 @@ JSONTEST_FIXTURE( ValueTest, strings ) JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); - JSONTEST_ASSERT( std::string("a") == string1_.asString()); - JSONTEST_ASSERT( 
std::string("a") == string1_.asCString()); + JSONTEST_ASSERT_STRING_EQUAL("a", string1_.asString()); + JSONTEST_ASSERT_STRING_EQUAL("a", string1_.asCString()); } JSONTEST_FIXTURE( ValueTest, bools ) { - JSONTEST_ASSERT_EQUAL( Json::booleanValue, false_.type()); + JSONTEST_ASSERT_EQUAL(Json::booleanValue, false_.type()); IsCheck checks; checks.isBool_ = true; JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); - JSONTEST_ASSERT( true == true_.asBool()); - JSONTEST_ASSERT( false == false_.asBool()); + JSONTEST_ASSERT_EQUAL(true, true_.asBool()); + JSONTEST_ASSERT_EQUAL(false, false_.asBool()); } @@ -222,12 +222,12 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0 == val.asInt()); - JSONTEST_ASSERT( 0 == val.asLargestInt()); - JSONTEST_ASSERT( 0 == val.asUInt()); - JSONTEST_ASSERT( 0 == val.asLargestUInt()); - JSONTEST_ASSERT( 0.0 == val.asDouble()); - JSONTEST_ASSERT( 0.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); // Default uint val = Json::Value(Json::uintValue); @@ -244,12 +244,12 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0 == val.asInt()); - JSONTEST_ASSERT( 0 == val.asLargestInt()); - JSONTEST_ASSERT( 0 == val.asUInt()); - JSONTEST_ASSERT( 0 == val.asLargestUInt()); - JSONTEST_ASSERT( 0.0 == val.asDouble()); - JSONTEST_ASSERT( 0.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); // Default real val = Json::Value(Json::realValue); @@ -266,17 +266,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0 == val.asInt()); - JSONTEST_ASSERT( 0 == val.asLargestInt()); - JSONTEST_ASSERT( 0 == val.asUInt()); - JSONTEST_ASSERT( 0 == val.asLargestUInt()); - JSONTEST_ASSERT( 0.0 == val.asDouble()); - JSONTEST_ASSERT( 0.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); // Zero (signed constructor arg) val = Json::Value(0); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -288,17 +288,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0 == val.asInt()); - JSONTEST_ASSERT( 0 == val.asLargestInt()); - JSONTEST_ASSERT( 0 == val.asUInt()); - JSONTEST_ASSERT( 0 == val.asLargestUInt()); - JSONTEST_ASSERT( 0.0 == val.asDouble()); - JSONTEST_ASSERT( 0.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + 
JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); // Zero (unsigned constructor arg) val = Json::Value(0u); - JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -310,17 +310,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0 == val.asInt()); - JSONTEST_ASSERT( 0 == val.asLargestInt()); - JSONTEST_ASSERT( 0 == val.asUInt()); - JSONTEST_ASSERT( 0 == val.asLargestUInt()); - JSONTEST_ASSERT( 0.0 == val.asDouble()); - JSONTEST_ASSERT( 0.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); // Zero (floating-point constructor arg) val = Json::Value(0.0); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -332,17 +332,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0 == val.asInt()); - JSONTEST_ASSERT( 0 == val.asLargestInt()); - JSONTEST_ASSERT( 0 == val.asUInt()); - JSONTEST_ASSERT( 0 == val.asLargestUInt()); - JSONTEST_ASSERT( 0.0 == val.asDouble()); - JSONTEST_ASSERT( 0.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, val.asUInt()); + JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); // 2^20 (signed constructor arg) val = Json::Value(1 << 20); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -354,17 +354,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( (1 << 20) == val.asInt()); - JSONTEST_ASSERT( (1 << 20) == val.asLargestInt()); - JSONTEST_ASSERT( (1 << 20) == val.asUInt()); - JSONTEST_ASSERT( (1 << 20) == val.asLargestUInt()); - JSONTEST_ASSERT( (1 << 20) == val.asDouble()); - JSONTEST_ASSERT( (1 << 20) == val.asFloat()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); // 2^20 (unsigned constructor arg) val = Json::Value(1u << 20); - JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -376,17 +376,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( (1 << 20) == val.asInt()); - JSONTEST_ASSERT( (1 << 20) == val.asLargestInt()); - JSONTEST_ASSERT( (1 << 20) == val.asUInt()); - JSONTEST_ASSERT( (1 << 20) == val.asLargestUInt()); - JSONTEST_ASSERT( (1 << 20) == val.asDouble()); - JSONTEST_ASSERT( (1 << 20) == val.asFloat()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); + 
JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); // 2^20 (floating-point constructor arg) val = Json::Value((1 << 20) / 1.0); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -398,17 +398,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( (1 << 20) == val.asInt()); - JSONTEST_ASSERT( (1 << 20) == val.asLargestInt()); - JSONTEST_ASSERT( (1 << 20) == val.asUInt()); - JSONTEST_ASSERT( (1 << 20) == val.asLargestUInt()); - JSONTEST_ASSERT( (1 << 20) == val.asDouble()); - JSONTEST_ASSERT( (1 << 20) == val.asFloat()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); // -2^20 val = Json::Value(-(1 << 20)); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -418,15 +418,15 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( -(1 << 20) == val.asInt()); - JSONTEST_ASSERT( -(1 << 20) == val.asLargestInt()); - JSONTEST_ASSERT( -(1 << 20) == val.asDouble()); - JSONTEST_ASSERT( -(1 << 20) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asInt()); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asDouble()); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asFloat()); // int32 max val = Json::Value(kint32max); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -438,17 +438,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( kint32max == val.asInt()); - JSONTEST_ASSERT( kint32max == val.asLargestInt()); - JSONTEST_ASSERT( kint32max == val.asUInt()); - JSONTEST_ASSERT( kint32max == val.asLargestUInt()); - JSONTEST_ASSERT( kint32max == val.asDouble()); - JSONTEST_ASSERT( kint32max == val.asFloat()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asUInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asDouble()); + JSONTEST_ASSERT_EQUAL(kint32max, val.asFloat()); // int32 min val = Json::Value(kint32min); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt_ = true; @@ -458,15 +458,15 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( kint32min == val.asInt()); - JSONTEST_ASSERT( kint32min == val.asLargestInt()); - JSONTEST_ASSERT( kint32min == val.asDouble()); - JSONTEST_ASSERT( kint32min == val.asFloat()); + JSONTEST_ASSERT_EQUAL(kint32min, val.asInt()); + JSONTEST_ASSERT_EQUAL(kint32min, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(kint32min, val.asDouble()); + JSONTEST_ASSERT_EQUAL(kint32min, val.asFloat()); // uint32 
max val = Json::Value(kuint32max); - JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -478,57 +478,57 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); #ifndef JSON_NO_INT64 - JSONTEST_ASSERT( kuint32max == val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(kuint32max, val.asLargestInt()); #endif - JSONTEST_ASSERT( kuint32max == val.asUInt()); - JSONTEST_ASSERT( kuint32max == val.asLargestUInt()); - JSONTEST_ASSERT( kuint32max == val.asDouble()); - JSONTEST_ASSERT( kuint32max == val.asFloat()); + JSONTEST_ASSERT_EQUAL(kuint32max, val.asUInt()); + JSONTEST_ASSERT_EQUAL(kuint32max, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(kuint32max, val.asDouble()); + JSONTEST_ASSERT_EQUAL(kuint32max, val.asFloat()); #ifdef JSON_NO_INT64 // int64 max val = Json::Value(double(kint64max)); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( double(kint64max) == val.asDouble()); - JSONTEST_ASSERT( float(kint64max) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(double(kint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64max), val.asFloat()); // int64 min val = Json::Value(double(kint64min)); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( double(kint64min) == val.asDouble()); - JSONTEST_ASSERT( float(kint64min) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(double(kint64min), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64min), val.asFloat()); // uint64 max val = Json::Value(double(kuint64max)); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( double(kuint64max) == val.asDouble()); - JSONTEST_ASSERT( float(kuint64max) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(double(kuint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kuint64max), val.asFloat()); #else // ifdef JSON_NO_INT64 // 2^40 (signed constructor arg) val = Json::Value(1LL << 40); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -538,17 +538,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( (1LL << 40) == val.asInt64()); - JSONTEST_ASSERT( (1LL << 40) == val.asLargestInt()); - JSONTEST_ASSERT( (1LL << 40) == val.asUInt64()); - JSONTEST_ASSERT( (1LL << 40) == val.asLargestUInt()); - JSONTEST_ASSERT( (1LL << 40) == val.asDouble()); - JSONTEST_ASSERT( (1LL << 40) == val.asFloat()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); // 2^40 (unsigned constructor arg) val = Json::Value(1ULL << 40); - JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + 
JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -558,17 +558,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( (1LL << 40) == val.asInt64()); - JSONTEST_ASSERT( (1LL << 40) == val.asLargestInt()); - JSONTEST_ASSERT( (1LL << 40) == val.asUInt64()); - JSONTEST_ASSERT( (1LL << 40) == val.asLargestUInt()); - JSONTEST_ASSERT( (1LL << 40) == val.asDouble()); - JSONTEST_ASSERT( (1LL << 40) == val.asFloat()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); // 2^40 (floating-point constructor arg) val = Json::Value((1LL << 40) / 1.0); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -578,17 +578,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( (1LL << 40) == val.asInt64()); - JSONTEST_ASSERT( (1LL << 40) == val.asLargestInt()); - JSONTEST_ASSERT( (1LL << 40) == val.asUInt64()); - JSONTEST_ASSERT( (1LL << 40) == val.asLargestUInt()); - JSONTEST_ASSERT( (1LL << 40) == val.asDouble()); - JSONTEST_ASSERT( (1LL << 40) == val.asFloat()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); // -2^40 val = Json::Value(-(1LL << 40)); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -597,15 +597,15 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( -(1LL << 40) == val.asInt64()); - JSONTEST_ASSERT( -(1LL << 40) == val.asLargestInt()); - JSONTEST_ASSERT( -(1LL << 40) == val.asDouble()); - JSONTEST_ASSERT( -(1LL << 40) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asFloat()); // int64 max val = Json::Value(Json::Int64(kint64max)); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -615,18 +615,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( kint64max == val.asInt64()); - JSONTEST_ASSERT( kint64max == val.asLargestInt()); - JSONTEST_ASSERT( kint64max == val.asUInt64()); - JSONTEST_ASSERT( kint64max == val.asLargestUInt()); - JSONTEST_ASSERT( double(kint64max) == val.asDouble()); - JSONTEST_ASSERT( float(kint64max) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(kint64max, val.asInt64()); + JSONTEST_ASSERT_EQUAL(kint64max, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(kint64max, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(kint64max, val.asLargestUInt()); + 
JSONTEST_ASSERT_EQUAL(double(kint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64max), val.asFloat()); // int64 max (floating point constructor). Note that kint64max is not exactly // representable as a double, and will be rounded up to be higher. val = Json::Value(double(kint64max)); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isUInt64_ = true; @@ -635,15 +635,15 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 9223372036854775808ULL == val.asUInt64()); - JSONTEST_ASSERT( 9223372036854775808ULL == val.asLargestUInt()); - JSONTEST_ASSERT( 9223372036854775808ULL == val.asDouble()); - JSONTEST_ASSERT( 9223372036854775808ULL == val.asFloat()); + JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asDouble()); + JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asFloat()); // int64 min val = Json::Value(Json::Int64(kint64min)); - JSONTEST_ASSERT_EQUAL( Json::intValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -652,16 +652,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( kint64min == val.asInt64()); - JSONTEST_ASSERT( kint64min == val.asLargestInt()); - JSONTEST_ASSERT( double(kint64min) == val.asDouble()); - JSONTEST_ASSERT( float(kint64min) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(kint64min, val.asInt64()); + JSONTEST_ASSERT_EQUAL(kint64min, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(double(kint64min), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kint64min), val.asFloat()); // int64 min (floating point constructor). Note that kint64min *is* exactly // representable as a double. 
val = Json::Value(double(kint64min)); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isInt64_ = true; @@ -670,15 +670,15 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( -9223372036854775808LL == val.asInt64()); - JSONTEST_ASSERT( -9223372036854775808LL == val.asLargestInt()); - JSONTEST_ASSERT( -9223372036854775808.0 == val.asDouble()); - JSONTEST_ASSERT( -9223372036854775808.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(-9223372036854775808LL, val.asInt64()); + JSONTEST_ASSERT_EQUAL(-9223372036854775808LL, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat()); // uint64 max val = Json::Value(Json::UInt64(kuint64max)); - JSONTEST_ASSERT_EQUAL( Json::uintValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); checks = IsCheck(); checks.isUInt64_ = true; @@ -687,24 +687,24 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( kuint64max == val.asUInt64()); - JSONTEST_ASSERT( kuint64max == val.asLargestUInt()); - JSONTEST_ASSERT( double(kuint64max) == val.asDouble()); - JSONTEST_ASSERT( float(kuint64max) == val.asFloat()); + JSONTEST_ASSERT_EQUAL(kuint64max, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(kuint64max, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(double(kuint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(kuint64max), val.asFloat()); // uint64 max (floating point constructor). Note that kuint64max is not // exactly representable as a double, and will be rounded up to be higher. 
val = Json::Value(double(kuint64max)); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 18446744073709551616.0 == val.asDouble()); - JSONTEST_ASSERT( 18446744073709551616.0 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asDouble()); + JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asFloat()); #endif } @@ -717,28 +717,28 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) // Positive number val = Json::Value(0.25); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( 0.25 == val.asDouble()); - JSONTEST_ASSERT( 0.25 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(0.25, val.asDouble()); + JSONTEST_ASSERT_EQUAL(0.25, val.asFloat()); // Negative number val = Json::Value(-0.25); - JSONTEST_ASSERT_EQUAL( Json::realValue, val.type()); + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); checks = IsCheck(); checks.isDouble_ = true; checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT( -0.25 == val.asDouble()); - JSONTEST_ASSERT( -0.25 == val.asFloat()); + JSONTEST_ASSERT_EQUAL(-0.25, val.asDouble()); + JSONTEST_ASSERT_EQUAL(-0.25, val.asFloat()); } @@ -757,7 +757,7 @@ ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expecte void ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount ) { - JSONTEST_ASSERT_EQUAL( expectedCount, value.size() ); + JSONTEST_ASSERT_EQUAL(expectedCount, value.size() ); unsigned int count = 0; Json::Value::iterator itEnd = value.end(); @@ -791,23 +791,23 @@ ValueTest::IsCheck::IsCheck() void ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) { - JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() ); - JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() ); - JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() ); - JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() ); - JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() ); - JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() ); - JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() ); - JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() ); - JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() ); - JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() ); + JSONTEST_ASSERT_EQUAL(check.isObject_, value.isObject() ); + JSONTEST_ASSERT_EQUAL(check.isArray_, value.isArray() ); + JSONTEST_ASSERT_EQUAL(check.isBool_, value.isBool() ); + JSONTEST_ASSERT_EQUAL(check.isDouble_, value.isDouble() ); + JSONTEST_ASSERT_EQUAL(check.isInt_, value.isInt() ); + JSONTEST_ASSERT_EQUAL(check.isUInt_, value.isUInt() ); + JSONTEST_ASSERT_EQUAL(check.isIntegral_, value.isIntegral() ); + JSONTEST_ASSERT_EQUAL(check.isNumeric_, value.isNumeric() ); + JSONTEST_ASSERT_EQUAL(check.isString_, value.isString() ); + JSONTEST_ASSERT_EQUAL(check.isNull_, value.isNull() ); #ifdef JSON_HAS_INT64 - JSONTEST_ASSERT_EQUAL( check.isInt64_, value.isInt64() ); - JSONTEST_ASSERT_EQUAL( check.isUInt64_, value.isUInt64() ); + JSONTEST_ASSERT_EQUAL(check.isInt64_, value.isInt64() ); + JSONTEST_ASSERT_EQUAL(check.isUInt64_, value.isUInt64() ); #else - JSONTEST_ASSERT_EQUAL( false, value.isInt64() ); - JSONTEST_ASSERT_EQUAL( false, 
value.isUInt64() ); + JSONTEST_ASSERT_EQUAL(false, value.isInt64() ); + JSONTEST_ASSERT_EQUAL(false, value.isUInt64() ); #endif } From 54e55c1d3e84e720d2b1c6716e5da2997fc1122f Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 02:46:28 +0000 Subject: [PATCH 224/268] Reworked the type conversion system again, so that: * isFoo methods determine exact representability. * asFoo methods cause casting when safe. * isConvertibleTo indicates whether casting is safe. See NEWS.txt for details. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@224 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 50 ++- trunk/jsoncpp/src/lib_json/json_value.cpp | 160 +++++--- trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 3 +- trunk/jsoncpp/src/test_lib_json/main.cpp | 404 ++++++++++++++++++- 4 files changed, 522 insertions(+), 95 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index ac7c856..86c28c0 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -1,22 +1,36 @@ - New in SVN: - ----------- +New in SVN +---------- + + * Updated the type system's behavior, in order to better support backwards + compatibility with code that was written before 64-bit integer support was + introduced. Here's how it works now: + + * isInt, isInt64, isUInt, and isUInt64 return true if and only if the + value can be exactly represented as that type. In particular, a value + constructed with a double like 17.0 will now return true for all of + these methods. + + * isDouble and isFloat now return true for all numeric values, since all + numeric values can be converted to a double or float without + truncation. Note however that the conversion may not be exact -- for + example, doubles cannot exactly represent all integers above 2^53 + 1. + + * isBool, isNull, isString, isArray, and isObject now return true if and + only if the value is of that type. + + * isConvertibleTo(fooValue) indicates that it is safe to call asFoo. + (For each type foo, isFoo always implies isConvertibleTo(fooValue).) + asFoo returns an approximate or exact representation as appropriate. + For example, a double value may be truncated when asInt is called. + + * For backwards compatibility with old code, isConvertibleTo(intValue) + may return false even if type() == intValue. This is because the value + may have been constructed with a 64-bit integer larger than maxInt, + and calling asInt() would cause an exception. If you're writing new + code, use isInt64 to find out whether the value is exactly + representable using an Int64, or asDouble() combined with minInt64 and + maxInt64 to figure out whether it is approximately representable. -* Value - - - Updated the Value::isFoo methods to work as follows: - - * isInt, isInt64, isUInt, and isUInt64 return true if and only if the - value can be exactly representable as that type. In particular, a value - constructed with a double like 17.0 will now return true for all of - these methods. - - * isDouble and isFloat now return true for all numeric values, since all - numeric values can be converted to a double or float without - truncation. Note that the conversion may not be exact -- for example, - doubles cannot exactly represent integers above 2^53. - - * isBool, isNull, isString, isArray, and isObject now return true if and - only if the value is of that type.
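(Illustrative aside, not part of the patch: a minimal sketch of how the reworked rules in the NEWS.txt entry above behave in user code. It assumes a build with 64-bit integer support enabled, i.e. JSON_HAS_INT64, and the usual <json/json.h> header.)

#include <json/json.h>
#include <cassert>

int main()
{
   Json::Value seventeen( 17.0 );
   assert( seventeen.isDouble() );                         // constructed from a double
   assert( seventeen.isInt() );                            // 17.0 is exactly representable as Int
   assert( seventeen.isConvertibleTo( Json::intValue ) );  // so asInt() is safe...
   assert( seventeen.asInt() == 17 );                      // ...and exact

   Json::Value big( Json::Int64(1) << 40 );                // 2^40 does not fit in 32 bits
   assert( !big.isInt() );                                 // not exactly representable as Int
   assert( big.isInt64() );                                // but exact as Int64
   assert( !big.isConvertibleTo( Json::intValue ) );       // asInt() would fail its range check
   assert( big.asDouble() == 1099511627776.0 );            // asDouble() is exact for 2^40
   return 0;
}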
New in JsonCpp 0.6.0: --------------------- diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index aed7542..522817b 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -13,6 +13,7 @@ #endif // if !defined(JSON_IS_AMALGAMATION) #include #include +#include #include #include #include @@ -43,6 +44,11 @@ const LargestUInt Value::maxLargestUInt = LargestUInt(-1); /// Unknown size marker static const unsigned int unknown = (unsigned)-1; +template +static inline bool InRange(double d, T min, U max) { + return d >= min && d <= max; +} + /** Duplicates the specified string value. * @param value Pointer to the string to duplicate. Must be zero-terminated if @@ -663,6 +669,9 @@ Value::asCString() const std::string Value::asString() const { + // Let the STL sort it out for numeric types. + std::ostringstream oss; + switch ( type_ ) { case nullValue: @@ -672,15 +681,18 @@ Value::asString() const case booleanValue: return value_.bool_ ? "true" : "false"; case intValue: + oss << value_.int_; + break; case uintValue: + oss << value_.uint_; + break; case realValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Type is not convertible to string" ); + oss << value_.real_; + break; default: - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE( "Type is not convertible to string" ); } - return ""; // unreachable + return oss.str(); } # ifdef JSON_USE_CPPTL @@ -695,17 +707,23 @@ Value::asConstString() const Value::Int Value::asInt() const { - JSON_ASSERT_MESSAGE(isInt(), "Value is not convertible to Int"); switch ( type_ ) { case intValue: + JSON_ASSERT_MESSAGE(isInt(), "LargestInt out of Int range"); return Int(value_.int_); case uintValue: + JSON_ASSERT_MESSAGE(isInt(), "LargestUInt out of Int range"); return Int(value_.uint_); case realValue: - return Int( value_.real_ ); + JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt, maxInt), "double out of Int range"); + return Int(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; default: - break; + JSON_FAIL_MESSAGE("Value is not convertible to Int."); } JSON_ASSERT_UNREACHABLE; return 0; @@ -715,17 +733,23 @@ Value::asInt() const Value::UInt Value::asUInt() const { - JSON_ASSERT_MESSAGE(isUInt(), "Value is not convertible to UInt"); switch ( type_ ) { case intValue: + JSON_ASSERT_MESSAGE(isUInt(), "LargestInt out of UInt range"); return UInt(value_.int_); case uintValue: + JSON_ASSERT_MESSAGE(isUInt(), "LargestUInt out of UInt range"); return UInt(value_.uint_); case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt), "double out of UInt range"); return UInt( value_.real_ ); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; default: - break; + JSON_FAIL_MESSAGE("Value is not convertible to UInt."); } JSON_ASSERT_UNREACHABLE; return 0; @@ -737,17 +761,22 @@ Value::asUInt() const Value::Int64 Value::asInt64() const { - JSON_ASSERT_MESSAGE(isInt64(), "Value is not convertible to Int64"); switch ( type_ ) { case intValue: return Int64(value_.int_); case uintValue: + JSON_ASSERT_MESSAGE(isInt64(), "LargestUInt out of Int64 range"); return Int64(value_.uint_); case realValue: - return Int64( value_.real_ ); + JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt64, maxInt64), "double out of Int64 range"); + return Int64(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 
1 : 0; default: - break; + JSON_FAIL_MESSAGE("Value is not convertible to Int64."); } JSON_ASSERT_UNREACHABLE; return 0; @@ -757,15 +786,20 @@ Value::asInt64() const Value::UInt64 Value::asUInt64() const { - JSON_ASSERT_MESSAGE(isUInt64(), "Value is not convertible to UInt64"); switch ( type_ ) { case intValue: + JSON_ASSERT_MESSAGE(isUInt64(), "LargestInt out of UInt64 range"); return UInt64(value_.int_); case uintValue: return UInt64(value_.uint_); case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt64), "double out of UInt64 range"); return UInt64( value_.real_ ); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; default: break; } @@ -814,13 +848,11 @@ Value::asDouble() const case realValue: return value_.real_; case nullValue: + return 0.0; case booleanValue: - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Value is not a double" ); + return value_.bool_ ? 1.0 : 0.0; default: - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to double."); } return 0; // unreachable; } @@ -841,15 +873,14 @@ Value::asFloat() const case realValue: return static_cast( value_.real_ ); case nullValue: + return 0.0; case booleanValue: - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Value is not a float" ); + return value_.bool_ ? 1.0 : 0.0; default: - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to float."); } - return 0.0f; // unreachable; + JSON_ASSERT_UNREACHABLE; + return 0.0f; } bool @@ -860,68 +891,67 @@ Value::asBool() const case booleanValue: return value_.bool_; case nullValue: + return false; case intValue: + return value_.int_ ? true : false; case uintValue: + return value_.uint_ ? true : false; case realValue: - case stringValue: - case arrayValue: - case objectValue: - JSON_FAIL_MESSAGE( "Value is not a bool" ); + return value_.real_ ? 
true : false; default: - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to bool."); } - return false; // unreachable; + JSON_ASSERT_UNREACHABLE; + return false; } bool Value::isConvertibleTo( ValueType other ) const { - switch ( type_ ) + switch ( other ) { case nullValue: - return true; + return ( isNumeric() && asDouble() == 0.0 ) + || ( type_ == booleanValue && value_.bool_ == false ) + || ( type_ == stringValue && asString() == "" ) + || ( type_ == arrayValue && value_.map_->size() == 0 ) + || ( type_ == objectValue && value_.map_->size() == 0 ) + || type_ == nullValue; case intValue: - return ( other == nullValue && value_.int_ == 0 ) - || other == intValue - || ( other == uintValue && value_.int_ >= 0 ) - || other == realValue - || other == stringValue - || other == booleanValue; + return isInt() + || (type_ == realValue && InRange(value_.real_, minInt, maxInt)) + || type_ == booleanValue + || type_ == nullValue; case uintValue: - return ( other == nullValue && value_.uint_ == 0 ) - || ( other == intValue && value_.uint_ <= (LargestUInt)maxInt ) - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; + return isUInt() + || (type_ == realValue && InRange(value_.real_, 0, maxUInt)) + || type_ == booleanValue + || type_ == nullValue; case realValue: - return ( other == nullValue && value_.real_ == 0.0 ) - || ( other == intValue && value_.real_ >= minInt && value_.real_ <= maxInt ) - || ( other == uintValue && value_.real_ >= 0 && value_.real_ <= maxUInt ) - || other == realValue - || other == stringValue - || other == booleanValue; + return isNumeric() + || type_ == booleanValue + || type_ == nullValue; case booleanValue: - return ( other == nullValue && value_.bool_ == false ) - || other == intValue - || other == uintValue - || other == realValue - || other == stringValue - || other == booleanValue; + return isNumeric() + || type_ == booleanValue + || type_ == nullValue; case stringValue: - return other == stringValue - || ( other == nullValue && (!value_.string_ || value_.string_[0] == 0) ); + return isNumeric() + || type_ == booleanValue + || type_ == stringValue + || type_ == nullValue; case arrayValue: - return other == arrayValue - || ( other == nullValue && value_.map_->size() == 0 ); + return type_ == arrayValue + || type_ == nullValue; case objectValue: - return other == objectValue - || ( other == nullValue && value_.map_->size() == 0 ); + return type_ == objectValue + || type_ == nullValue; default: - JSON_ASSERT_UNREACHABLE; + break; } - return false; // unreachable; + JSON_ASSERT_UNREACHABLE; + return false; } diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp index 3dda9f1..46f2eea 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -250,7 +250,6 @@ TestResult::addToLastFailure( const std::string &message ) } - // class TestCase // ////////////////////////////////////////////////////////////////// @@ -324,7 +323,7 @@ Runner::runTestAt( unsigned int index, TestResult &result ) const catch ( const std::exception &e ) { result.addFailure( __FILE__, __LINE__, - "Unexpected exception caugth:" ) << e.what(); + "Unexpected exception caught:" ) << e.what(); } #endif // if JSON_USE_EXCEPTION delete test; diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 6dc4782..14f49c1 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ 
b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -121,6 +121,23 @@ JSONTEST_FIXTURE( ValueTest, objects ) JSONTEST_ASSERT_EQUAL(Json::objectValue, emptyObject_.type()); + // Empty object okay + JSONTEST_ASSERT(emptyObject_.isConvertibleTo(Json::nullValue)); + + // Non-empty object not okay + JSONTEST_ASSERT(!object1_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(emptyObject_.isConvertibleTo(Json::objectValue)); + + // Never okay + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!emptyObject_.isConvertibleTo(Json::stringValue)); + // Access through const reference const Json::Value &constObject = object1_; @@ -148,6 +165,23 @@ JSONTEST_FIXTURE( ValueTest, arrays ) JSONTEST_ASSERT_EQUAL(Json::arrayValue, array1_.type()); + // Empty array okay + JSONTEST_ASSERT(emptyArray_.isConvertibleTo(Json::nullValue)); + + // Non-empty array not okay + JSONTEST_ASSERT(!array1_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(emptyArray_.isConvertibleTo(Json::arrayValue)); + + // Never okay + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::objectValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!emptyArray_.isConvertibleTo(Json::stringValue)); + // Access through const reference const Json::Value &constArray = array1_; JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[index0]); @@ -170,6 +204,23 @@ JSONTEST_FIXTURE( ValueTest, null ) IsCheck checks; checks.isNull_ = true; JSONTEST_ASSERT_PRED( checkIs( null_, checks ) ); + + JSONTEST_ASSERT(null_.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(null_.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(Json::Int(0), null_.asInt()); + JSONTEST_ASSERT_EQUAL(Json::LargestInt(0), null_.asLargestInt()); + JSONTEST_ASSERT_EQUAL(Json::UInt(0), null_.asUInt()); + JSONTEST_ASSERT_EQUAL(Json::LargestUInt(0), null_.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, null_.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, null_.asFloat()); + JSONTEST_ASSERT_STRING_EQUAL("", null_.asString()); } @@ -183,6 +234,22 @@ JSONTEST_FIXTURE( ValueTest, strings ) JSONTEST_ASSERT_PRED( checkIs( string_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) ); + // Empty string okay + JSONTEST_ASSERT(emptyString_.isConvertibleTo(Json::nullValue)); + + // Non-empty string not okay + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(string1_.isConvertibleTo(Json::stringValue)); + + // Never okay + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::objectValue)); + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::intValue)); + 
JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!string1_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT_STRING_EQUAL("a", string1_.asString()); JSONTEST_ASSERT_STRING_EQUAL("a", string1_.asCString()); } @@ -197,8 +264,38 @@ JSONTEST_FIXTURE( ValueTest, bools ) JSONTEST_ASSERT_PRED( checkIs( false_, checks ) ); JSONTEST_ASSERT_PRED( checkIs( true_, checks ) ); + // False okay + JSONTEST_ASSERT(false_.isConvertibleTo(Json::nullValue)); + + // True not okay + JSONTEST_ASSERT(!true_.isConvertibleTo(Json::nullValue)); + + // Always okay + JSONTEST_ASSERT(true_.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(true_.isConvertibleTo(Json::stringValue)); + + // Never okay + JSONTEST_ASSERT(!true_.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!true_.isConvertibleTo(Json::objectValue)); + JSONTEST_ASSERT_EQUAL(true, true_.asBool()); + JSONTEST_ASSERT_EQUAL(1, true_.asInt()); + JSONTEST_ASSERT_EQUAL(1, true_.asLargestInt()); + JSONTEST_ASSERT_EQUAL(1, true_.asUInt()); + JSONTEST_ASSERT_EQUAL(1, true_.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(1.0, true_.asDouble()); + JSONTEST_ASSERT_EQUAL(1.0, true_.asFloat()); + JSONTEST_ASSERT_EQUAL(false, false_.asBool()); + JSONTEST_ASSERT_EQUAL(0, false_.asInt()); + JSONTEST_ASSERT_EQUAL(0, false_.asLargestInt()); + JSONTEST_ASSERT_EQUAL(0, false_.asUInt()); + JSONTEST_ASSERT_EQUAL(0, false_.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(0.0, false_.asDouble()); + JSONTEST_ASSERT_EQUAL(0.0, false_.asFloat()); } @@ -207,6 +304,25 @@ JSONTEST_FIXTURE( ValueTest, integers ) IsCheck checks; Json::Value val; + // Conversions that don't depend on the value. 
+ JSONTEST_ASSERT(Json::Value(17).isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(Json::Value(17).isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(Json::Value(17).isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!Json::Value(17).isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!Json::Value(17).isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT(Json::Value(17U).isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(Json::Value(17U).isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(Json::Value(17U).isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!Json::Value(17U).isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!Json::Value(17U).isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT(Json::Value(17.0).isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(Json::Value(17.0).isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(Json::Value(17.0).isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(!Json::Value(17.0).isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!Json::Value(17.0).isConvertibleTo(Json::objectValue)); + // Default int val = Json::Value(Json::intValue); @@ -222,12 +338,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(0, val.asUInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); // Default uint val = Json::Value(Json::uintValue); @@ -244,18 +366,28 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(0, val.asUInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); // Default real val = Json::Value(Json::realValue); JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + checks = IsCheck(); checks.isInt_ = true; checks.isInt64_ = true; @@ -272,6 +404,8 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); // Zero (signed constructor arg) val = Json::Value(0); @@ -288,12 +422,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(0, 
val.asInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(0, val.asUInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); // Zero (unsigned constructor arg) val = Json::Value(0u); @@ -310,12 +450,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(0, val.asUInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); // Zero (floating-point constructor arg) val = Json::Value(0.0); @@ -332,12 +478,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(0, val.asInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(0, val.asUInt()); JSONTEST_ASSERT_EQUAL(0, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(false, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); // 2^20 (signed constructor arg) val = Json::Value(1 << 20); @@ -354,12 +506,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1048576", val.asString()); // 2^20 (unsigned constructor arg) val = Json::Value(1u << 20); @@ -376,12 +534,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1048576", val.asString()); // 2^20 (floating-point constructor arg) val = Json::Value((1 << 20) / 1.0); @@ -398,12 +562,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) 
); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL((1 << 20), val.asInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asUInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asLargestUInt()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1.04858e+06", val.asString()); // -2^20 val = Json::Value(-(1 << 20)); @@ -418,10 +588,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asInt()); JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asLargestInt()); JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asDouble()); JSONTEST_ASSERT_EQUAL(-(1 << 20), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-1048576", val.asString()); // int32 max val = Json::Value(kint32max); @@ -438,12 +614,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(kint32max, val.asInt()); JSONTEST_ASSERT_EQUAL(kint32max, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(kint32max, val.asUInt()); JSONTEST_ASSERT_EQUAL(kint32max, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(kint32max, val.asDouble()); JSONTEST_ASSERT_EQUAL(kint32max, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("2147483647", val.asString()); // int32 min val = Json::Value(kint32min); @@ -458,10 +640,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(kint32min, val.asInt()); JSONTEST_ASSERT_EQUAL(kint32min, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(kint32min, val.asDouble()); JSONTEST_ASSERT_EQUAL(kint32min, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-2147483648", val.asString()); // uint32 max val = Json::Value(kuint32max); @@ -477,6 +665,10 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + #ifndef JSON_NO_INT64 JSONTEST_ASSERT_EQUAL(kuint32max, val.asLargestInt()); #endif @@ -484,6 +676,8 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(kuint32max, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(kuint32max, val.asDouble()); JSONTEST_ASSERT_EQUAL(kuint32max, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("4294967295", val.asString()); #ifdef JSON_NO_INT64 // int64 max @@ -496,8 +690,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; 
JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(double(kint64max), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("9.22337204e18", val.asString()); // int64 min val = Json::Value(double(kint64min)); @@ -509,8 +709,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(double(kint64min), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kint64min), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-9.22337204e18", val.asString()); // uint64 max val = Json::Value(double(kuint64max)); @@ -522,8 +728,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(double(kuint64max), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kuint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1.84467441e19", val.asString()); #else // ifdef JSON_NO_INT64 // 2^40 (signed constructor arg) val = Json::Value(1LL << 40); @@ -538,12 +750,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1099511627776", val.asString()); // 2^40 (unsigned constructor arg) val = Json::Value(1ULL << 40); @@ -558,12 +776,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1099511627776", val.asString()); // 2^40 (floating-point constructor arg) val = Json::Value((1LL << 40) / 1.0); @@ -578,12 +802,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + 
JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1.09951e+12", val.asString()); // -2^40 val = Json::Value(-(1LL << 40)); @@ -597,10 +827,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asInt64()); JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asLargestInt()); JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asDouble()); JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-1099511627776", val.asString()); // int64 max val = Json::Value(Json::Int64(kint64max)); @@ -615,12 +851,18 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(kint64max, val.asInt64()); JSONTEST_ASSERT_EQUAL(kint64max, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(kint64max, val.asUInt64()); JSONTEST_ASSERT_EQUAL(kint64max, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(double(kint64max), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("9223372036854775807", val.asString()); // int64 max (floating point constructor). Note that kint64max is not exactly // representable as a double, and will be rounded up to be higher. 
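(Illustrative aside, not part of the patch: why the floating-point-constructor case in the next hunk rounds up. kint64max = 2^63 - 1 needs 63 significant bits, more than the 53-bit mantissa of an IEEE double, so double(kint64max) rounds to the nearest representable value, which is exactly 2^63. That is why the assertions below compare against 9223372036854775808 rather than kint64max.)

#include <cstdio>

int main()
{
   const long long kint64max = 9223372036854775807LL;  // 2^63 - 1
   const double rounded = double( kint64max );         // nearest double is 2^63
   std::printf( "%.1f\n", rounded );                   // prints 9223372036854775808.0
   return 0;
}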
@@ -635,10 +877,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asUInt64()); JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asDouble()); JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("9.22337e+18", val.asString()); // int64 min val = Json::Value(Json::Int64(kint64min)); @@ -652,10 +900,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(kint64min, val.asInt64()); JSONTEST_ASSERT_EQUAL(kint64min, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(double(kint64min), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kint64min), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-9223372036854775808", val.asString()); // int64 min (floating point constructor). Note that kint64min *is* exactly // representable as a double. @@ -670,10 +924,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT_EQUAL(-9223372036854775808LL, val.asInt64()); - JSONTEST_ASSERT_EQUAL(-9223372036854775808LL, val.asLargestInt()); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + + JSONTEST_ASSERT_EQUAL(kint64min, val.asInt64()); + JSONTEST_ASSERT_EQUAL(kint64min, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("-9.22337e+18", val.asString()); // uint64 max val = Json::Value(Json::UInt64(kuint64max)); @@ -687,10 +947,16 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(kuint64max, val.asUInt64()); JSONTEST_ASSERT_EQUAL(kuint64max, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(double(kuint64max), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kuint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("18446744073709551615", val.asString()); // uint64 max (floating point constructor). Note that kuint64max is not // exactly representable as a double, and will be rounded up to be higher. 
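(Illustrative aside, not part of the patch: the same rounding applies to kuint64max = 2^64 - 1, which becomes exactly 2^64 = 18446744073709551616.0 as a double. The "1.84467e+19" expected string in the hunk below is simply that double streamed through a default std::ostringstream, as the new asString() does, which formats with six significant digits.)

#include <sstream>
#include <iostream>

int main()
{
   std::ostringstream oss;
   oss << 18446744073709551616.0;    // double(kuint64max), rounded up to 2^64
   std::cout << oss.str() << "\n";   // prints 1.84467e+19
   return 0;
}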
@@ -703,8 +969,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asFloat()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_STRING_EQUAL("1.84467e+19", val.asString()); #endif } @@ -714,8 +986,8 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) IsCheck checks; Json::Value val; - // Positive number - val = Json::Value(0.25); + // Small positive number + val = Json::Value(1.5); JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); @@ -724,11 +996,52 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT_EQUAL(0.25, val.asDouble()); - JSONTEST_ASSERT_EQUAL(0.25, val.asFloat()); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(1.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(1.5, val.asFloat()); + JSONTEST_ASSERT_EQUAL(1, val.asInt()); + JSONTEST_ASSERT_EQUAL(1, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(1, val.asUInt()); + JSONTEST_ASSERT_EQUAL(1, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("1.5", val.asString()); + + // Small negative number + val = Json::Value(-1.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - // Negative number - val = Json::Value(-0.25); + JSONTEST_ASSERT(val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(-1.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(-1.5, val.asFloat()); + JSONTEST_ASSERT_EQUAL(-1, val.asInt()); + JSONTEST_ASSERT_EQUAL(-1, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("-1.5", val.asString()); + + // A bit over int32 max + val = Json::Value(kint32max + 0.5); JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); @@ -737,8 +1050,79 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) checks.isNumeric_ = true; JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); - JSONTEST_ASSERT_EQUAL(-0.25, val.asDouble()); - JSONTEST_ASSERT_EQUAL(-0.25, val.asFloat()); + JSONTEST_ASSERT(val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + 
JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(2147483647.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(2147483647.5), val.asFloat()); + JSONTEST_ASSERT_EQUAL(2147483647U, val.asUInt()); +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL(2147483647L, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(2147483647U, val.asLargestUInt()); +#endif + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("2.14748e+09", val.asString()); + + // A bit under int32 min + val = Json::Value(kint32min - 0.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(-2147483648.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(-2147483648.5), val.asFloat()); +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL(-2147483648LL, val.asLargestInt()); +#endif + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("-2.14748e+09", val.asString()); + + // A bit over uint32 max + val = Json::Value(kuint32max + 0.5); + + JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); + + checks = IsCheck(); + checks.isDouble_ = true; + checks.isNumeric_ = true; + JSONTEST_ASSERT_PRED( checkIs( val, checks ) ); + + JSONTEST_ASSERT(val.isConvertibleTo(Json::realValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::booleanValue)); + JSONTEST_ASSERT(val.isConvertibleTo(Json::stringValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::nullValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::arrayValue)); + JSONTEST_ASSERT(!val.isConvertibleTo(Json::objectValue)); + + JSONTEST_ASSERT_EQUAL(4294967295.5, val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(4294967295.5), val.asFloat()); +#ifdef JSON_HAS_INT64 + JSONTEST_ASSERT_EQUAL(4294967295LL, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(4294967295ULL, val.asLargestUInt()); +#endif + JSONTEST_ASSERT_EQUAL(true, val.asBool()); + JSONTEST_ASSERT_EQUAL("4.29497e+09", val.asString()); } From 903ccdefd650631df1b8d1a4aca7e44954fd620b Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 03:04:01 +0000 Subject: [PATCH 225/268] Fixed a double -> float compilation warning/error. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@225 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 522817b..d311288 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -875,7 +875,7 @@ Value::asFloat() const case nullValue: return 0.0; case booleanValue: - return value_.bool_ ? 1.0 : 0.0; + return value_.bool_ ? 
1.0f : 0.0f; default: JSON_FAIL_MESSAGE("Value is not convertible to float."); } From ff5471fa77b69b95e2e4a901690b98d74cea4193 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 03:27:44 +0000 Subject: [PATCH 226/268] Got rid of some unreachable code. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@226 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index d311288..9c1a994 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -805,7 +805,6 @@ Value::asUInt64() const } JSON_ASSERT_UNREACHABLE; return 0; - return 0; // unreachable; } # endif // if defined(JSON_HAS_INT64) @@ -876,10 +875,8 @@ Value::asFloat() const return 0.0; case booleanValue: return value_.bool_ ? 1.0f : 0.0f; - default: - JSON_FAIL_MESSAGE("Value is not convertible to float."); } - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to float."); return 0.0f; } @@ -898,10 +895,8 @@ Value::asBool() const return value_.uint_ ? true : false; case realValue: return value_.real_ ? true : false; - default: - JSON_FAIL_MESSAGE("Value is not convertible to bool."); } - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to bool."); return false; } @@ -947,8 +942,6 @@ Value::isConvertibleTo( ValueType other ) const case objectValue: return type_ == objectValue || type_ == nullValue; - default: - break; } JSON_ASSERT_UNREACHABLE; return false; @@ -985,9 +978,8 @@ Value::size() const case objectValue: return Int( value_.map_->size() ); #endif - default: - JSON_ASSERT_UNREACHABLE; } + JSON_ASSERT_UNREACHABLE; return 0; // unreachable; } From 0596f2ff4821eb764661cb447d05941e6e31eaa9 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 03:32:11 +0000 Subject: [PATCH 227/268] Fixed more default cases. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@227 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 9c1a994..6a29048 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -723,9 +723,9 @@ Value::asInt() const case booleanValue: return value_.bool_ ? 1 : 0; default: - JSON_FAIL_MESSAGE("Value is not convertible to Int."); + break; } - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to Int."); return 0; } @@ -749,9 +749,9 @@ Value::asUInt() const case booleanValue: return value_.bool_ ? 1 : 0; default: - JSON_FAIL_MESSAGE("Value is not convertible to UInt."); + break; } - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to UInt."); return 0; } @@ -776,9 +776,9 @@ Value::asInt64() const case booleanValue: return value_.bool_ ? 1 : 0; default: - JSON_FAIL_MESSAGE("Value is not convertible to Int64."); + break; } - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to Int64."); return 0; } @@ -803,7 +803,7 @@ Value::asUInt64() const default: break; } - JSON_ASSERT_UNREACHABLE; + JSON_FAIL_MESSAGE("Value is not convertible to UInt64."); return 0; } # endif // if defined(JSON_HAS_INT64) @@ -851,9 +851,10 @@ Value::asDouble() const case booleanValue: return value_.bool_ ? 
1.0 : 0.0; default: - JSON_FAIL_MESSAGE("Value is not convertible to double."); + break; } - return 0; // unreachable; + JSON_FAIL_MESSAGE("Value is not convertible to double."); + return 0; } float @@ -875,6 +876,8 @@ Value::asFloat() const return 0.0; case booleanValue: return value_.bool_ ? 1.0f : 0.0f; + default: + break; } JSON_FAIL_MESSAGE("Value is not convertible to float."); return 0.0f; @@ -895,6 +898,8 @@ Value::asBool() const return value_.uint_ ? true : false; case realValue: return value_.real_ ? true : false; + default: + break; } JSON_FAIL_MESSAGE("Value is not convertible to bool."); return false; From 05daff3ece85516228d6f95b54dde39639596367 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 03:44:02 +0000 Subject: [PATCH 228/268] Another round of attempting to fix VC++ errors... git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@228 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 7 ------- 1 file changed, 7 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 6a29048..98ce606 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -726,7 +726,6 @@ Value::asInt() const break; } JSON_FAIL_MESSAGE("Value is not convertible to Int."); - return 0; } @@ -752,7 +751,6 @@ Value::asUInt() const break; } JSON_FAIL_MESSAGE("Value is not convertible to UInt."); - return 0; } @@ -779,7 +777,6 @@ Value::asInt64() const break; } JSON_FAIL_MESSAGE("Value is not convertible to Int64."); - return 0; } @@ -804,7 +801,6 @@ Value::asUInt64() const break; } JSON_FAIL_MESSAGE("Value is not convertible to UInt64."); - return 0; } # endif // if defined(JSON_HAS_INT64) @@ -854,7 +850,6 @@ Value::asDouble() const break; } JSON_FAIL_MESSAGE("Value is not convertible to double."); - return 0; } float @@ -880,7 +875,6 @@ Value::asFloat() const break; } JSON_FAIL_MESSAGE("Value is not convertible to float."); - return 0.0f; } bool @@ -902,7 +896,6 @@ Value::asBool() const break; } JSON_FAIL_MESSAGE("Value is not convertible to bool."); - return false; } From f5ecdcad4166083556f13560a80740fe9b50e7b2 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 06:58:14 +0000 Subject: [PATCH 229/268] Added a few test cases that Google is using internally for patches made in the past. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@229 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- .../jsoncpp/test/data/test_array_07.expected | 2122 +++++++++++++++++ trunk/jsoncpp/test/data/test_array_07.json | 2 + .../jsoncpp/test/data/test_string_04.expected | 2 + trunk/jsoncpp/test/data/test_string_04.json | 2 + .../jsoncpp/test/data/test_string_05.expected | 2 + trunk/jsoncpp/test/data/test_string_05.json | 2 + 6 files changed, 2132 insertions(+) create mode 100644 trunk/jsoncpp/test/data/test_array_07.expected create mode 100644 trunk/jsoncpp/test/data/test_array_07.json create mode 100644 trunk/jsoncpp/test/data/test_string_04.expected create mode 100644 trunk/jsoncpp/test/data/test_string_04.json create mode 100644 trunk/jsoncpp/test/data/test_string_05.expected create mode 100644 trunk/jsoncpp/test/data/test_string_05.json diff --git a/trunk/jsoncpp/test/data/test_array_07.expected b/trunk/jsoncpp/test/data/test_array_07.expected new file mode 100644 index 0000000..ee2fafc --- /dev/null +++ b/trunk/jsoncpp/test/data/test_array_07.expected @@ -0,0 +1,2122 @@ +.=[] +.[0]=[] +.[0][0]="A" +.[0][1]=0 +.[0][2]=1 +.[0][3]=2 +.[0][4]=3 +.[0][5]=4 +.[0][6]=5 +.[0][7]=6 +.[0][8]=7 +.[0][9]=8 +.[0][10]=9 +.[0][11]=10 +.[0][12]=11 +.[0][13]=12 +.[0][14]=13 +.[0][15]=14 +.[0][16]=15 +.[0][17]=16 +.[0][18]=17 +.[0][19]=18 +.[0][20]=19 +.[0][21]=20 +.[0][22]=21 +.[0][23]=22 +.[0][24]=23 +.[0][25]=24 +.[0][26]=25 +.[0][27]=26 +.[0][28]=27 +.[0][29]=28 +.[0][30]=29 +.[0][31]=30 +.[0][32]=31 +.[0][33]=32 +.[0][34]=33 +.[0][35]=34 +.[0][36]=35 +.[0][37]=36 +.[0][38]=37 +.[0][39]=38 +.[0][40]=39 +.[0][41]=40 +.[0][42]=41 +.[0][43]=42 +.[0][44]=43 +.[0][45]=44 +.[0][46]=45 +.[0][47]=46 +.[0][48]=47 +.[0][49]=48 +.[0][50]=49 +.[0][51]=50 +.[0][52]=51 +.[0][53]=52 +.[0][54]=53 +.[0][55]=54 +.[0][56]=55 +.[0][57]=56 +.[0][58]=57 +.[0][59]=58 +.[0][60]=59 +.[0][61]=60 +.[0][62]=61 +.[0][63]=62 +.[0][64]=63 +.[0][65]=64 +.[0][66]=65 +.[0][67]=66 +.[0][68]=67 +.[0][69]=68 +.[0][70]=69 +.[0][71]=70 +.[0][72]=71 +.[0][73]=72 +.[0][74]=73 +.[0][75]=74 +.[0][76]=75 +.[0][77]=76 +.[0][78]=77 +.[0][79]=78 +.[0][80]=79 +.[0][81]=80 +.[0][82]=81 +.[0][83]=82 +.[0][84]=83 +.[0][85]=84 +.[0][86]=85 +.[0][87]=86 +.[0][88]=87 +.[0][89]=88 +.[0][90]=89 +.[0][91]=90 +.[0][92]=91 +.[0][93]=92 +.[0][94]=93 +.[0][95]=94 +.[0][96]=95 +.[0][97]=96 +.[0][98]=97 +.[0][99]=98 +.[0][100]=99 +.[0][101]=100 +.[0][102]=101 +.[0][103]=102 +.[0][104]=103 +.[0][105]=104 +.[0][106]=105 +.[0][107]=106 +.[0][108]=107 +.[0][109]=108 +.[0][110]=109 +.[0][111]=110 +.[0][112]=111 +.[0][113]=112 +.[0][114]=113 +.[0][115]=114 +.[0][116]=115 +.[0][117]=116 +.[0][118]=117 +.[0][119]=118 +.[0][120]=119 +.[0][121]=120 +.[0][122]=121 +.[0][123]=122 +.[0][124]=123 +.[0][125]=124 +.[0][126]=125 +.[0][127]=126 +.[0][128]=127 +.[0][129]=128 +.[0][130]=129 +.[0][131]=130 +.[0][132]=131 +.[0][133]=132 +.[0][134]=133 +.[0][135]=134 +.[0][136]=135 +.[0][137]=136 +.[0][138]=137 +.[0][139]=138 +.[0][140]=139 +.[0][141]=140 +.[0][142]=141 +.[0][143]=142 +.[0][144]=143 +.[0][145]=144 +.[0][146]=145 +.[0][147]=146 +.[0][148]=147 +.[0][149]=148 +.[0][150]=149 +.[0][151]=150 +.[0][152]=151 +.[0][153]=152 +.[0][154]=153 +.[0][155]=154 +.[0][156]=155 +.[0][157]=156 +.[0][158]=157 +.[0][159]=158 +.[0][160]=159 +.[0][161]=160 +.[0][162]=161 +.[0][163]=162 +.[0][164]=163 +.[0][165]=164 +.[0][166]=165 +.[0][167]=166 +.[0][168]=167 +.[0][169]=168 +.[0][170]=169 +.[0][171]=170 +.[0][172]=171 +.[0][173]=172 +.[0][174]=173 +.[0][175]=174 +.[0][176]=175 +.[0][177]=176 
+.[0][178]=177 +.[0][179]=178 +.[0][180]=179 +.[0][181]=180 +.[0][182]=181 +.[0][183]=182 +.[0][184]=183 +.[0][185]=184 +.[0][186]=185 +.[0][187]=186 +.[0][188]=187 +.[0][189]=188 +.[0][190]=189 +.[0][191]=190 +.[0][192]=191 +.[0][193]=192 +.[0][194]=193 +.[0][195]=194 +.[0][196]=195 +.[0][197]=196 +.[0][198]=197 +.[0][199]=198 +.[0][200]=199 +.[0][201]=200 +.[0][202]=201 +.[0][203]=202 +.[0][204]=203 +.[0][205]=204 +.[0][206]=205 +.[0][207]=206 +.[0][208]=207 +.[0][209]=208 +.[0][210]=209 +.[0][211]=210 +.[0][212]=211 +.[0][213]=212 +.[0][214]=213 +.[0][215]=214 +.[0][216]=215 +.[0][217]=216 +.[0][218]=217 +.[0][219]=218 +.[0][220]=219 +.[0][221]=220 +.[0][222]=221 +.[0][223]=222 +.[0][224]=223 +.[0][225]=224 +.[0][226]=225 +.[0][227]=226 +.[0][228]=227 +.[0][229]=228 +.[0][230]=229 +.[0][231]=230 +.[0][232]=231 +.[0][233]=232 +.[0][234]=233 +.[0][235]=234 +.[0][236]=235 +.[0][237]=236 +.[0][238]=237 +.[0][239]=238 +.[0][240]=239 +.[0][241]=240 +.[0][242]=241 +.[0][243]=242 +.[0][244]=243 +.[0][245]=244 +.[0][246]=245 +.[0][247]=246 +.[0][248]=247 +.[0][249]=248 +.[0][250]=249 +.[0][251]=250 +.[0][252]=251 +.[0][253]=252 +.[0][254]=253 +.[0][255]=254 +.[0][256]=255 +.[0][257]=256 +.[0][258]=257 +.[0][259]=258 +.[0][260]=259 +.[0][261]=260 +.[0][262]=261 +.[0][263]=262 +.[0][264]=263 +.[0][265]=264 +.[0][266]=265 +.[0][267]=266 +.[0][268]=267 +.[0][269]=268 +.[0][270]=269 +.[0][271]=270 +.[0][272]=271 +.[0][273]=272 +.[0][274]=273 +.[0][275]=274 +.[0][276]=275 +.[0][277]=276 +.[0][278]=277 +.[0][279]=278 +.[0][280]=279 +.[0][281]=280 +.[0][282]=281 +.[0][283]=282 +.[0][284]=283 +.[0][285]=284 +.[0][286]=285 +.[0][287]=286 +.[0][288]=287 +.[0][289]=288 +.[0][290]=289 +.[0][291]=290 +.[0][292]=291 +.[0][293]=292 +.[0][294]=293 +.[0][295]=294 +.[0][296]=295 +.[0][297]=296 +.[0][298]=297 +.[0][299]=298 +.[0][300]=299 +.[0][301]=300 +.[0][302]=301 +.[0][303]=302 +.[0][304]=303 +.[0][305]=304 +.[0][306]=305 +.[0][307]=306 +.[0][308]=307 +.[0][309]=308 +.[0][310]=309 +.[0][311]=310 +.[0][312]=311 +.[0][313]=312 +.[0][314]=313 +.[0][315]=314 +.[0][316]=315 +.[0][317]=316 +.[0][318]=317 +.[0][319]=318 +.[0][320]=319 +.[0][321]=320 +.[0][322]=321 +.[0][323]=322 +.[0][324]=323 +.[0][325]=324 +.[0][326]=325 +.[0][327]=326 +.[0][328]=327 +.[0][329]=328 +.[0][330]=329 +.[0][331]=330 +.[0][332]=331 +.[0][333]=332 +.[0][334]=333 +.[0][335]=334 +.[0][336]=335 +.[0][337]=336 +.[0][338]=337 +.[0][339]=338 +.[0][340]=339 +.[0][341]=340 +.[0][342]=341 +.[0][343]=342 +.[0][344]=343 +.[0][345]=344 +.[0][346]=345 +.[0][347]=346 +.[0][348]=347 +.[0][349]=348 +.[0][350]=349 +.[0][351]=350 +.[0][352]=351 +.[0][353]=352 +.[0][354]=353 +.[0][355]=354 +.[0][356]=355 +.[0][357]=356 +.[0][358]=357 +.[0][359]=358 +.[0][360]=359 +.[0][361]=360 +.[0][362]=361 +.[0][363]=362 +.[0][364]=363 +.[0][365]=364 +.[0][366]=365 +.[0][367]=366 +.[0][368]=367 +.[0][369]=368 +.[0][370]=369 +.[0][371]=370 +.[0][372]=371 +.[0][373]=372 +.[0][374]=373 +.[0][375]=374 +.[0][376]=375 +.[0][377]=376 +.[0][378]=377 +.[0][379]=378 +.[0][380]=379 +.[0][381]=380 +.[0][382]=381 +.[0][383]=382 +.[0][384]=383 +.[0][385]=384 +.[0][386]=385 +.[0][387]=386 +.[0][388]=387 +.[0][389]=388 +.[0][390]=389 +.[0][391]=390 +.[0][392]=391 +.[0][393]=392 +.[0][394]=393 +.[0][395]=394 +.[0][396]=395 +.[0][397]=396 +.[0][398]=397 +.[0][399]=398 +.[0][400]=399 +.[0][401]=400 +.[0][402]=401 +.[0][403]=402 +.[0][404]=403 +.[0][405]=404 +.[0][406]=405 +.[0][407]=406 +.[0][408]=407 +.[0][409]=408 +.[0][410]=409 +.[0][411]=410 +.[0][412]=411 +.[0][413]=412 +.[0][414]=413 
+.[0][415]=414 +.[0][416]=415 +.[0][417]=416 +.[0][418]=417 +.[0][419]=418 +.[0][420]=419 +.[0][421]=420 +.[0][422]=421 +.[0][423]=422 +.[0][424]=423 +.[0][425]=424 +.[0][426]=425 +.[0][427]=426 +.[0][428]=427 +.[0][429]=428 +.[0][430]=429 +.[0][431]=430 +.[0][432]=431 +.[0][433]=432 +.[0][434]=433 +.[0][435]=434 +.[0][436]=435 +.[0][437]=436 +.[0][438]=437 +.[0][439]=438 +.[0][440]=439 +.[0][441]=440 +.[0][442]=441 +.[0][443]=442 +.[0][444]=443 +.[0][445]=444 +.[0][446]=445 +.[0][447]=446 +.[0][448]=447 +.[0][449]=448 +.[0][450]=449 +.[0][451]=450 +.[0][452]=451 +.[0][453]=452 +.[0][454]=453 +.[0][455]=454 +.[0][456]=455 +.[0][457]=456 +.[0][458]=457 +.[0][459]=458 +.[0][460]=459 +.[0][461]=460 +.[0][462]=461 +.[0][463]=462 +.[0][464]=463 +.[0][465]=464 +.[0][466]=465 +.[0][467]=466 +.[0][468]=467 +.[0][469]=468 +.[0][470]=469 +.[0][471]=470 +.[0][472]=471 +.[0][473]=472 +.[0][474]=473 +.[0][475]=474 +.[0][476]=475 +.[0][477]=476 +.[0][478]=477 +.[0][479]=478 +.[0][480]=479 +.[0][481]=480 +.[0][482]=481 +.[0][483]=482 +.[0][484]=483 +.[0][485]=484 +.[0][486]=485 +.[0][487]=486 +.[0][488]=487 +.[0][489]=488 +.[0][490]=489 +.[0][491]=490 +.[0][492]=491 +.[0][493]=492 +.[0][494]=493 +.[0][495]=494 +.[0][496]=495 +.[0][497]=496 +.[0][498]=497 +.[0][499]=498 +.[0][500]=499 +.[0][501]=500 +.[0][502]=501 +.[0][503]=502 +.[0][504]=503 +.[0][505]=504 +.[0][506]=505 +.[0][507]=506 +.[0][508]=507 +.[0][509]=508 +.[0][510]=509 +.[0][511]=510 +.[0][512]=511 +.[0][513]=512 +.[0][514]=513 +.[0][515]=514 +.[0][516]=515 +.[0][517]=516 +.[0][518]=517 +.[0][519]=518 +.[0][520]=519 +.[0][521]=520 +.[0][522]=521 +.[0][523]=522 +.[0][524]=523 +.[0][525]=524 +.[0][526]=525 +.[0][527]=526 +.[0][528]=527 +.[0][529]=528 +.[0][530]=529 +.[0][531]=530 +.[0][532]=531 +.[0][533]=532 +.[0][534]=533 +.[0][535]=534 +.[0][536]=535 +.[0][537]=536 +.[0][538]=537 +.[0][539]=538 +.[0][540]=539 +.[0][541]=540 +.[0][542]=541 +.[0][543]=542 +.[0][544]=543 +.[0][545]=544 +.[0][546]=545 +.[0][547]=546 +.[0][548]=547 +.[0][549]=548 +.[0][550]=549 +.[0][551]=550 +.[0][552]=551 +.[0][553]=552 +.[0][554]=553 +.[0][555]=554 +.[0][556]=555 +.[0][557]=556 +.[0][558]=557 +.[0][559]=558 +.[0][560]=559 +.[0][561]=560 +.[0][562]=561 +.[0][563]=562 +.[0][564]=563 +.[0][565]=564 +.[0][566]=565 +.[0][567]=566 +.[0][568]=567 +.[0][569]=568 +.[0][570]=569 +.[0][571]=570 +.[0][572]=571 +.[0][573]=572 +.[0][574]=573 +.[0][575]=574 +.[0][576]=575 +.[0][577]=576 +.[0][578]=577 +.[0][579]=578 +.[0][580]=579 +.[0][581]=580 +.[0][582]=581 +.[0][583]=582 +.[0][584]=583 +.[0][585]=584 +.[0][586]=585 +.[0][587]=586 +.[0][588]=587 +.[0][589]=588 +.[0][590]=589 +.[0][591]=590 +.[0][592]=591 +.[0][593]=592 +.[0][594]=593 +.[0][595]=594 +.[0][596]=595 +.[0][597]=596 +.[0][598]=597 +.[0][599]=598 +.[0][600]=599 +.[0][601]=600 +.[0][602]=601 +.[0][603]=602 +.[0][604]=603 +.[0][605]=604 +.[0][606]=605 +.[0][607]=606 +.[0][608]=607 +.[0][609]=608 +.[0][610]=609 +.[0][611]=610 +.[0][612]=611 +.[0][613]=612 +.[0][614]=613 +.[0][615]=614 +.[0][616]=615 +.[0][617]=616 +.[0][618]=617 +.[0][619]=618 +.[0][620]=619 +.[0][621]=620 +.[0][622]=621 +.[0][623]=622 +.[0][624]=623 +.[0][625]=624 +.[0][626]=625 +.[0][627]=626 +.[0][628]=627 +.[0][629]=628 +.[0][630]=629 +.[0][631]=630 +.[0][632]=631 +.[0][633]=632 +.[0][634]=633 +.[0][635]=634 +.[0][636]=635 +.[0][637]=636 +.[0][638]=637 +.[0][639]=638 +.[0][640]=639 +.[0][641]=640 +.[0][642]=641 +.[0][643]=642 +.[0][644]=643 +.[0][645]=644 +.[0][646]=645 +.[0][647]=646 +.[0][648]=647 +.[0][649]=648 +.[0][650]=649 +.[0][651]=650 
+.[0][652]=651 +.[0][653]=652 +.[0][654]=653 +.[0][655]=654 +.[0][656]=655 +.[0][657]=656 +.[0][658]=657 +.[0][659]=658 +.[0][660]=659 +.[0][661]=660 +.[0][662]=661 +.[0][663]=662 +.[0][664]=663 +.[0][665]=664 +.[0][666]=665 +.[0][667]=666 +.[0][668]=667 +.[0][669]=668 +.[0][670]=669 +.[0][671]=670 +.[0][672]=671 +.[0][673]=672 +.[0][674]=673 +.[0][675]=674 +.[0][676]=675 +.[0][677]=676 +.[0][678]=677 +.[0][679]=678 +.[0][680]=679 +.[0][681]=680 +.[0][682]=681 +.[0][683]=682 +.[0][684]=683 +.[0][685]=684 +.[0][686]=685 +.[0][687]=686 +.[0][688]=687 +.[0][689]=688 +.[0][690]=689 +.[0][691]=690 +.[0][692]=691 +.[0][693]=692 +.[0][694]=693 +.[0][695]=694 +.[0][696]=695 +.[0][697]=696 +.[0][698]=697 +.[0][699]=698 +.[0][700]=699 +.[0][701]=700 +.[0][702]=701 +.[0][703]=702 +.[0][704]=703 +.[0][705]=704 +.[0][706]=705 +.[0][707]=706 +.[0][708]=707 +.[0][709]=708 +.[0][710]=709 +.[0][711]=710 +.[0][712]=711 +.[0][713]=712 +.[0][714]=713 +.[0][715]=714 +.[0][716]=715 +.[0][717]=716 +.[0][718]=717 +.[0][719]=718 +.[0][720]=719 +.[0][721]=720 +.[0][722]=721 +.[0][723]=722 +.[0][724]=723 +.[0][725]=724 +.[0][726]=725 +.[0][727]=726 +.[0][728]=727 +.[0][729]=728 +.[0][730]=729 +.[0][731]=730 +.[0][732]=731 +.[0][733]=732 +.[0][734]=733 +.[0][735]=734 +.[0][736]=735 +.[0][737]=736 +.[0][738]=737 +.[0][739]=738 +.[0][740]=739 +.[0][741]=740 +.[0][742]=741 +.[0][743]=742 +.[0][744]=743 +.[0][745]=744 +.[0][746]=745 +.[0][747]=746 +.[0][748]=747 +.[0][749]=748 +.[0][750]=749 +.[0][751]=750 +.[0][752]=751 +.[0][753]=752 +.[0][754]=753 +.[0][755]=754 +.[0][756]=755 +.[0][757]=756 +.[0][758]=757 +.[0][759]=758 +.[0][760]=759 +.[0][761]=760 +.[0][762]=761 +.[0][763]=762 +.[0][764]=763 +.[0][765]=764 +.[0][766]=765 +.[0][767]=766 +.[0][768]=767 +.[0][769]=768 +.[0][770]=769 +.[0][771]=770 +.[0][772]=771 +.[0][773]=772 +.[0][774]=773 +.[0][775]=774 +.[0][776]=775 +.[0][777]=776 +.[0][778]=777 +.[0][779]=778 +.[0][780]=779 +.[0][781]=780 +.[0][782]=781 +.[0][783]=782 +.[0][784]=783 +.[0][785]=784 +.[0][786]=785 +.[0][787]=786 +.[0][788]=787 +.[0][789]=788 +.[0][790]=789 +.[0][791]=790 +.[0][792]=791 +.[0][793]=792 +.[0][794]=793 +.[0][795]=794 +.[0][796]=795 +.[0][797]=796 +.[0][798]=797 +.[0][799]=798 +.[0][800]=799 +.[0][801]=800 +.[0][802]=801 +.[0][803]=802 +.[0][804]=803 +.[0][805]=804 +.[0][806]=805 +.[0][807]=806 +.[0][808]=807 +.[0][809]=808 +.[0][810]=809 +.[0][811]=810 +.[0][812]=811 +.[0][813]=812 +.[0][814]=813 +.[0][815]=814 +.[0][816]=815 +.[0][817]=816 +.[0][818]=817 +.[0][819]=818 +.[0][820]=819 +.[0][821]=820 +.[0][822]=821 +.[0][823]=822 +.[0][824]=823 +.[0][825]=824 +.[0][826]=825 +.[0][827]=826 +.[0][828]=827 +.[0][829]=828 +.[0][830]=829 +.[0][831]=830 +.[0][832]=831 +.[0][833]=832 +.[0][834]=833 +.[0][835]=834 +.[0][836]=835 +.[0][837]=836 +.[0][838]=837 +.[0][839]=838 +.[0][840]=839 +.[0][841]=840 +.[0][842]=841 +.[0][843]=842 +.[0][844]=843 +.[0][845]=844 +.[0][846]=845 +.[0][847]=846 +.[0][848]=847 +.[0][849]=848 +.[0][850]=849 +.[0][851]=850 +.[0][852]=851 +.[0][853]=852 +.[0][854]=853 +.[0][855]=854 +.[0][856]=855 +.[0][857]=856 +.[0][858]=857 +.[0][859]=858 +.[0][860]=859 +.[0][861]=860 +.[0][862]=861 +.[0][863]=862 +.[0][864]=863 +.[0][865]=864 +.[0][866]=865 +.[0][867]=866 +.[0][868]=867 +.[0][869]=868 +.[0][870]=869 +.[0][871]=870 +.[0][872]=871 +.[0][873]=872 +.[0][874]=873 +.[0][875]=874 +.[0][876]=875 +.[0][877]=876 +.[0][878]=877 +.[0][879]=878 +.[0][880]=879 +.[0][881]=880 +.[0][882]=881 +.[0][883]=882 +.[0][884]=883 +.[0][885]=884 +.[0][886]=885 +.[0][887]=886 +.[0][888]=887 
+.[0][889]=888 +.[0][890]=889 +.[0][891]=890 +.[0][892]=891 +.[0][893]=892 +.[0][894]=893 +.[0][895]=894 +.[0][896]=895 +.[0][897]=896 +.[0][898]=897 +.[0][899]=898 +.[0][900]=899 +.[0][901]=900 +.[0][902]=901 +.[0][903]=902 +.[0][904]=903 +.[0][905]=904 +.[0][906]=905 +.[0][907]=906 +.[0][908]=907 +.[0][909]=908 +.[0][910]=909 +.[0][911]=910 +.[0][912]=911 +.[0][913]=912 +.[0][914]=913 +.[0][915]=914 +.[0][916]=915 +.[0][917]=916 +.[0][918]=917 +.[0][919]=918 +.[0][920]=919 +.[0][921]=920 +.[0][922]=921 +.[0][923]=922 +.[0][924]=923 +.[0][925]=924 +.[0][926]=925 +.[0][927]=926 +.[0][928]=927 +.[0][929]=928 +.[0][930]=929 +.[0][931]=930 +.[0][932]=931 +.[0][933]=932 +.[0][934]=933 +.[0][935]=934 +.[0][936]=935 +.[0][937]=936 +.[0][938]=937 +.[0][939]=938 +.[0][940]=939 +.[0][941]=940 +.[0][942]=941 +.[0][943]=942 +.[0][944]=943 +.[0][945]=944 +.[0][946]=945 +.[0][947]=946 +.[0][948]=947 +.[0][949]=948 +.[0][950]=949 +.[0][951]=950 +.[0][952]=951 +.[0][953]=952 +.[0][954]=953 +.[0][955]=954 +.[0][956]=955 +.[0][957]=956 +.[0][958]=957 +.[0][959]=958 +.[0][960]=959 +.[0][961]=960 +.[0][962]=961 +.[0][963]=962 +.[0][964]=963 +.[0][965]=964 +.[0][966]=965 +.[0][967]=966 +.[0][968]=967 +.[0][969]=968 +.[0][970]=969 +.[0][971]=970 +.[0][972]=971 +.[0][973]=972 +.[0][974]=973 +.[0][975]=974 +.[0][976]=975 +.[0][977]=976 +.[0][978]=977 +.[0][979]=978 +.[0][980]=979 +.[0][981]=980 +.[0][982]=981 +.[0][983]=982 +.[0][984]=983 +.[0][985]=984 +.[0][986]=985 +.[0][987]=986 +.[0][988]=987 +.[0][989]=988 +.[0][990]=989 +.[0][991]=990 +.[0][992]=991 +.[0][993]=992 +.[0][994]=993 +.[0][995]=994 +.[0][996]=995 +.[0][997]=996 +.[0][998]=997 +.[0][999]=998 +.[0][1000]=999 +.[0][1001]=1000 +.[0][1002]=1001 +.[0][1003]=1002 +.[0][1004]=1003 +.[0][1005]=1004 +.[0][1006]=1005 +.[0][1007]=1006 +.[0][1008]=1007 +.[0][1009]=1008 +.[0][1010]=1009 +.[0][1011]=1010 +.[0][1012]=1011 +.[0][1013]=1012 +.[0][1014]=1013 +.[0][1015]=1014 +.[0][1016]=1015 +.[0][1017]=1016 +.[0][1018]=1017 +.[0][1019]=1018 +.[0][1020]=1019 +.[0][1021]=1020 +.[0][1022]=1021 +.[0][1023]=1022 +.[0][1024]=1023 +.[0][1025]=1024 +.[0][1026]=1025 +.[0][1027]=1026 +.[0][1028]=1027 +.[0][1029]=1028 +.[0][1030]=1029 +.[0][1031]=1030 +.[0][1032]=1031 +.[0][1033]=1032 +.[0][1034]=1033 +.[0][1035]=1034 +.[0][1036]=1035 +.[0][1037]=1036 +.[0][1038]=1037 +.[0][1039]=1038 +.[0][1040]=1039 +.[0][1041]=1040 +.[0][1042]=1041 +.[0][1043]=1042 +.[0][1044]=1043 +.[0][1045]=1044 +.[0][1046]=1045 +.[0][1047]=1046 +.[0][1048]=1047 +.[0][1049]=1048 +.[0][1050]=1049 +.[0][1051]=1050 +.[0][1052]=1051 +.[0][1053]=1052 +.[0][1054]=1053 +.[0][1055]=1054 +.[0][1056]=1055 +.[0][1057]=1056 +.[0][1058]=1057 +.[0][1059]=1058 +.[0][1060]=1059 +.[0][1061]=1060 +.[0][1062]=1061 +.[0][1063]=1062 +.[0][1064]=1063 +.[0][1065]=1064 +.[0][1066]=1065 +.[0][1067]=1066 +.[0][1068]=1067 +.[0][1069]=1068 +.[0][1070]=1069 +.[0][1071]=1070 +.[0][1072]=1071 +.[0][1073]=1072 +.[0][1074]=1073 +.[0][1075]=1074 +.[0][1076]=1075 +.[0][1077]=1076 +.[0][1078]=1077 +.[0][1079]=1078 +.[0][1080]=1079 +.[0][1081]=1080 +.[0][1082]=1081 +.[0][1083]=1082 +.[0][1084]=1083 +.[0][1085]=1084 +.[0][1086]=1085 +.[0][1087]=1086 +.[0][1088]=1087 +.[0][1089]=1088 +.[0][1090]=1089 +.[0][1091]=1090 +.[0][1092]=1091 +.[0][1093]=1092 +.[0][1094]=1093 +.[0][1095]=1094 +.[0][1096]=1095 +.[0][1097]=1096 +.[0][1098]=1097 +.[0][1099]=1098 +.[0][1100]=1099 +.[0][1101]=1100 +.[0][1102]=1101 +.[0][1103]=1102 +.[0][1104]=1103 +.[0][1105]=1104 +.[0][1106]=1105 +.[0][1107]=1106 +.[0][1108]=1107 +.[0][1109]=1108 +.[0][1110]=1109 
+.[0][1111]=1110 +.[0][1112]=1111 +.[0][1113]=1112 +.[0][1114]=1113 +.[0][1115]=1114 +.[0][1116]=1115 +.[0][1117]=1116 +.[0][1118]=1117 +.[0][1119]=1118 +.[0][1120]=1119 +.[0][1121]=1120 +.[0][1122]=1121 +.[0][1123]=1122 +.[0][1124]=1123 +.[0][1125]=1124 +.[0][1126]=1125 +.[0][1127]=1126 +.[0][1128]=1127 +.[0][1129]=1128 +.[0][1130]=1129 +.[0][1131]=1130 +.[0][1132]=1131 +.[0][1133]=1132 +.[0][1134]=1133 +.[0][1135]=1134 +.[0][1136]=1135 +.[0][1137]=1136 +.[0][1138]=1137 +.[0][1139]=1138 +.[0][1140]=1139 +.[0][1141]=1140 +.[0][1142]=1141 +.[0][1143]=1142 +.[0][1144]=1143 +.[0][1145]=1144 +.[0][1146]=1145 +.[0][1147]=1146 +.[0][1148]=1147 +.[0][1149]=1148 +.[0][1150]=1149 +.[0][1151]=1150 +.[0][1152]=1151 +.[0][1153]=1152 +.[0][1154]=1153 +.[0][1155]=1154 +.[0][1156]=1155 +.[0][1157]=1156 +.[0][1158]=1157 +.[0][1159]=1158 +.[0][1160]=1159 +.[0][1161]=1160 +.[0][1162]=1161 +.[0][1163]=1162 +.[0][1164]=1163 +.[0][1165]=1164 +.[0][1166]=1165 +.[0][1167]=1166 +.[0][1168]=1167 +.[0][1169]=1168 +.[0][1170]=1169 +.[0][1171]=1170 +.[0][1172]=1171 +.[0][1173]=1172 +.[0][1174]=1173 +.[0][1175]=1174 +.[0][1176]=1175 +.[0][1177]=1176 +.[0][1178]=1177 +.[0][1179]=1178 +.[0][1180]=1179 +.[0][1181]=1180 +.[0][1182]=1181 +.[0][1183]=1182 +.[0][1184]=1183 +.[0][1185]=1184 +.[0][1186]=1185 +.[0][1187]=1186 +.[0][1188]=1187 +.[0][1189]=1188 +.[0][1190]=1189 +.[0][1191]=1190 +.[0][1192]=1191 +.[0][1193]=1192 +.[0][1194]=1193 +.[0][1195]=1194 +.[0][1196]=1195 +.[0][1197]=1196 +.[0][1198]=1197 +.[0][1199]=1198 +.[0][1200]=1199 +.[0][1201]=1200 +.[0][1202]=1201 +.[0][1203]=1202 +.[0][1204]=1203 +.[0][1205]=1204 +.[0][1206]=1205 +.[0][1207]=1206 +.[0][1208]=1207 +.[0][1209]=1208 +.[0][1210]=1209 +.[0][1211]=1210 +.[0][1212]=1211 +.[0][1213]=1212 +.[0][1214]=1213 +.[0][1215]=1214 +.[0][1216]=1215 +.[0][1217]=1216 +.[0][1218]=1217 +.[0][1219]=1218 +.[0][1220]=1219 +.[0][1221]=1220 +.[0][1222]=1221 +.[0][1223]=1222 +.[0][1224]=1223 +.[0][1225]=1224 +.[0][1226]=1225 +.[0][1227]=1226 +.[0][1228]=1227 +.[0][1229]=1228 +.[0][1230]=1229 +.[0][1231]=1230 +.[0][1232]=1231 +.[0][1233]=1232 +.[0][1234]=1233 +.[0][1235]=1234 +.[0][1236]=1235 +.[0][1237]=1236 +.[0][1238]=1237 +.[0][1239]=1238 +.[0][1240]=1239 +.[0][1241]=1240 +.[0][1242]=1241 +.[0][1243]=1242 +.[0][1244]=1243 +.[0][1245]=1244 +.[0][1246]=1245 +.[0][1247]=1246 +.[0][1248]=1247 +.[0][1249]=1248 +.[0][1250]=1249 +.[0][1251]=1250 +.[0][1252]=1251 +.[0][1253]=1252 +.[0][1254]=1253 +.[0][1255]=1254 +.[0][1256]=1255 +.[0][1257]=1256 +.[0][1258]=1257 +.[0][1259]=1258 +.[0][1260]=1259 +.[0][1261]=1260 +.[0][1262]=1261 +.[0][1263]=1262 +.[0][1264]=1263 +.[0][1265]=1264 +.[0][1266]=1265 +.[0][1267]=1266 +.[0][1268]=1267 +.[0][1269]=1268 +.[0][1270]=1269 +.[0][1271]=1270 +.[0][1272]=1271 +.[0][1273]=1272 +.[0][1274]=1273 +.[0][1275]=1274 +.[0][1276]=1275 +.[0][1277]=1276 +.[0][1278]=1277 +.[0][1279]=1278 +.[0][1280]=1279 +.[0][1281]=1280 +.[0][1282]=1281 +.[0][1283]=1282 +.[0][1284]=1283 +.[0][1285]=1284 +.[0][1286]=1285 +.[0][1287]=1286 +.[0][1288]=1287 +.[0][1289]=1288 +.[0][1290]=1289 +.[0][1291]=1290 +.[0][1292]=1291 +.[0][1293]=1292 +.[0][1294]=1293 +.[0][1295]=1294 +.[0][1296]=1295 +.[0][1297]=1296 +.[0][1298]=1297 +.[0][1299]=1298 +.[0][1300]=1299 +.[0][1301]=1300 +.[0][1302]=1301 +.[0][1303]=1302 +.[0][1304]=1303 +.[0][1305]=1304 +.[0][1306]=1305 +.[0][1307]=1306 +.[0][1308]=1307 +.[0][1309]=1308 +.[0][1310]=1309 +.[0][1311]=1310 +.[0][1312]=1311 +.[0][1313]=1312 +.[0][1314]=1313 +.[0][1315]=1314 +.[0][1316]=1315 +.[0][1317]=1316 +.[0][1318]=1317 +.[0][1319]=1318 
+.[0][1320]=1319 +.[0][1321]=1320 +.[0][1322]=1321 +.[0][1323]=1322 +.[0][1324]=1323 +.[0][1325]=1324 +.[0][1326]=1325 +.[0][1327]=1326 +.[0][1328]=1327 +.[0][1329]=1328 +.[0][1330]=1329 +.[0][1331]=1330 +.[0][1332]=1331 +.[0][1333]=1332 +.[0][1334]=1333 +.[0][1335]=1334 +.[0][1336]=1335 +.[0][1337]=1336 +.[0][1338]=1337 +.[0][1339]=1338 +.[0][1340]=1339 +.[0][1341]=1340 +.[0][1342]=1341 +.[0][1343]=1342 +.[0][1344]=1343 +.[0][1345]=1344 +.[0][1346]=1345 +.[0][1347]=1346 +.[0][1348]=1347 +.[0][1349]=1348 +.[0][1350]=1349 +.[0][1351]=1350 +.[0][1352]=1351 +.[0][1353]=1352 +.[0][1354]=1353 +.[0][1355]=1354 +.[0][1356]=1355 +.[0][1357]=1356 +.[0][1358]=1357 +.[0][1359]=1358 +.[0][1360]=1359 +.[0][1361]=1360 +.[0][1362]=1361 +.[0][1363]=1362 +.[0][1364]=1363 +.[0][1365]=1364 +.[0][1366]=1365 +.[0][1367]=1366 +.[0][1368]=1367 +.[0][1369]=1368 +.[0][1370]=1369 +.[0][1371]=1370 +.[0][1372]=1371 +.[0][1373]=1372 +.[0][1374]=1373 +.[0][1375]=1374 +.[0][1376]=1375 +.[0][1377]=1376 +.[0][1378]=1377 +.[0][1379]=1378 +.[0][1380]=1379 +.[0][1381]=1380 +.[0][1382]=1381 +.[0][1383]=1382 +.[0][1384]=1383 +.[0][1385]=1384 +.[0][1386]=1385 +.[0][1387]=1386 +.[0][1388]=1387 +.[0][1389]=1388 +.[0][1390]=1389 +.[0][1391]=1390 +.[0][1392]=1391 +.[0][1393]=1392 +.[0][1394]=1393 +.[0][1395]=1394 +.[0][1396]=1395 +.[0][1397]=1396 +.[0][1398]=1397 +.[0][1399]=1398 +.[0][1400]=1399 +.[0][1401]=1400 +.[0][1402]=1401 +.[0][1403]=1402 +.[0][1404]=1403 +.[0][1405]=1404 +.[0][1406]=1405 +.[0][1407]=1406 +.[0][1408]=1407 +.[0][1409]=1408 +.[0][1410]=1409 +.[0][1411]=1410 +.[0][1412]=1411 +.[0][1413]=1412 +.[0][1414]=1413 +.[0][1415]=1414 +.[0][1416]=1415 +.[0][1417]=1416 +.[0][1418]=1417 +.[0][1419]=1418 +.[0][1420]=1419 +.[0][1421]=1420 +.[0][1422]=1421 +.[0][1423]=1422 +.[0][1424]=1423 +.[0][1425]=1424 +.[0][1426]=1425 +.[0][1427]=1426 +.[0][1428]=1427 +.[0][1429]=1428 +.[0][1430]=1429 +.[0][1431]=1430 +.[0][1432]=1431 +.[0][1433]=1432 +.[0][1434]=1433 +.[0][1435]=1434 +.[0][1436]=1435 +.[0][1437]=1436 +.[0][1438]=1437 +.[0][1439]=1438 +.[0][1440]=1439 +.[0][1441]=1440 +.[0][1442]=1441 +.[0][1443]=1442 +.[0][1444]=1443 +.[0][1445]=1444 +.[0][1446]=1445 +.[0][1447]=1446 +.[0][1448]=1447 +.[0][1449]=1448 +.[0][1450]=1449 +.[0][1451]=1450 +.[0][1452]=1451 +.[0][1453]=1452 +.[0][1454]=1453 +.[0][1455]=1454 +.[0][1456]=1455 +.[0][1457]=1456 +.[0][1458]=1457 +.[0][1459]=1458 +.[0][1460]=1459 +.[0][1461]=1460 +.[0][1462]=1461 +.[0][1463]=1462 +.[0][1464]=1463 +.[0][1465]=1464 +.[0][1466]=1465 +.[0][1467]=1466 +.[0][1468]=1467 +.[0][1469]=1468 +.[0][1470]=1469 +.[0][1471]=1470 +.[0][1472]=1471 +.[0][1473]=1472 +.[0][1474]=1473 +.[0][1475]=1474 +.[0][1476]=1475 +.[0][1477]=1476 +.[0][1478]=1477 +.[0][1479]=1478 +.[0][1480]=1479 +.[0][1481]=1480 +.[0][1482]=1481 +.[0][1483]=1482 +.[0][1484]=1483 +.[0][1485]=1484 +.[0][1486]=1485 +.[0][1487]=1486 +.[0][1488]=1487 +.[0][1489]=1488 +.[0][1490]=1489 +.[0][1491]=1490 +.[0][1492]=1491 +.[0][1493]=1492 +.[0][1494]=1493 +.[0][1495]=1494 +.[0][1496]=1495 +.[0][1497]=1496 +.[0][1498]=1497 +.[0][1499]=1498 +.[0][1500]=1499 +.[0][1501]=1500 +.[0][1502]=1501 +.[0][1503]=1502 +.[0][1504]=1503 +.[0][1505]=1504 +.[0][1506]=1505 +.[0][1507]=1506 +.[0][1508]=1507 +.[0][1509]=1508 +.[0][1510]=1509 +.[0][1511]=1510 +.[0][1512]=1511 +.[0][1513]=1512 +.[0][1514]=1513 +.[0][1515]=1514 +.[0][1516]=1515 +.[0][1517]=1516 +.[0][1518]=1517 +.[0][1519]=1518 +.[0][1520]=1519 +.[0][1521]=1520 +.[0][1522]=1521 +.[0][1523]=1522 +.[0][1524]=1523 +.[0][1525]=1524 +.[0][1526]=1525 +.[0][1527]=1526 +.[0][1528]=1527 
+.[0][1529]=1528 +.[0][1530]=1529 +.[0][1531]=1530 +.[0][1532]=1531 +.[0][1533]=1532 +.[0][1534]=1533 +.[0][1535]=1534 +.[0][1536]=1535 +.[0][1537]=1536 +.[0][1538]=1537 +.[0][1539]=1538 +.[0][1540]=1539 +.[0][1541]=1540 +.[0][1542]=1541 +.[0][1543]=1542 +.[0][1544]=1543 +.[0][1545]=1544 +.[0][1546]=1545 +.[0][1547]=1546 +.[0][1548]=1547 +.[0][1549]=1548 +.[0][1550]=1549 +.[0][1551]=1550 +.[0][1552]=1551 +.[0][1553]=1552 +.[0][1554]=1553 +.[0][1555]=1554 +.[0][1556]=1555 +.[0][1557]=1556 +.[0][1558]=1557 +.[0][1559]=1558 +.[0][1560]=1559 +.[0][1561]=1560 +.[0][1562]=1561 +.[0][1563]=1562 +.[0][1564]=1563 +.[0][1565]=1564 +.[0][1566]=1565 +.[0][1567]=1566 +.[0][1568]=1567 +.[0][1569]=1568 +.[0][1570]=1569 +.[0][1571]=1570 +.[0][1572]=1571 +.[0][1573]=1572 +.[0][1574]=1573 +.[0][1575]=1574 +.[0][1576]=1575 +.[0][1577]=1576 +.[0][1578]=1577 +.[0][1579]=1578 +.[0][1580]=1579 +.[0][1581]=1580 +.[0][1582]=1581 +.[0][1583]=1582 +.[0][1584]=1583 +.[0][1585]=1584 +.[0][1586]=1585 +.[0][1587]=1586 +.[0][1588]=1587 +.[0][1589]=1588 +.[0][1590]=1589 +.[0][1591]=1590 +.[0][1592]=1591 +.[0][1593]=1592 +.[0][1594]=1593 +.[0][1595]=1594 +.[0][1596]=1595 +.[0][1597]=1596 +.[0][1598]=1597 +.[0][1599]=1598 +.[0][1600]=1599 +.[0][1601]=1600 +.[0][1602]=1601 +.[0][1603]=1602 +.[0][1604]=1603 +.[0][1605]=1604 +.[0][1606]=1605 +.[0][1607]=1606 +.[0][1608]=1607 +.[0][1609]=1608 +.[0][1610]=1609 +.[0][1611]=1610 +.[0][1612]=1611 +.[0][1613]=1612 +.[0][1614]=1613 +.[0][1615]=1614 +.[0][1616]=1615 +.[0][1617]=1616 +.[0][1618]=1617 +.[0][1619]=1618 +.[0][1620]=1619 +.[0][1621]=1620 +.[0][1622]=1621 +.[0][1623]=1622 +.[0][1624]=1623 +.[0][1625]=1624 +.[0][1626]=1625 +.[0][1627]=1626 +.[0][1628]=1627 +.[0][1629]=1628 +.[0][1630]=1629 +.[0][1631]=1630 +.[0][1632]=1631 +.[0][1633]=1632 +.[0][1634]=1633 +.[0][1635]=1634 +.[0][1636]=1635 +.[0][1637]=1636 +.[0][1638]=1637 +.[0][1639]=1638 +.[0][1640]=1639 +.[0][1641]=1640 +.[0][1642]=1641 +.[0][1643]=1642 +.[0][1644]=1643 +.[0][1645]=1644 +.[0][1646]=1645 +.[0][1647]=1646 +.[0][1648]=1647 +.[0][1649]=1648 +.[0][1650]=1649 +.[0][1651]=1650 +.[0][1652]=1651 +.[0][1653]=1652 +.[0][1654]=1653 +.[0][1655]=1654 +.[0][1656]=1655 +.[0][1657]=1656 +.[0][1658]=1657 +.[0][1659]=1658 +.[0][1660]=1659 +.[0][1661]=1660 +.[0][1662]=1661 +.[0][1663]=1662 +.[0][1664]=1663 +.[0][1665]=1664 +.[0][1666]=1665 +.[0][1667]=1666 +.[0][1668]=1667 +.[0][1669]=1668 +.[0][1670]=1669 +.[0][1671]=1670 +.[0][1672]=1671 +.[0][1673]=1672 +.[0][1674]=1673 +.[0][1675]=1674 +.[0][1676]=1675 +.[0][1677]=1676 +.[0][1678]=1677 +.[0][1679]=1678 +.[0][1680]=1679 +.[0][1681]=1680 +.[0][1682]=1681 +.[0][1683]=1682 +.[0][1684]=1683 +.[0][1685]=1684 +.[0][1686]=1685 +.[0][1687]=1686 +.[0][1688]=1687 +.[0][1689]=1688 +.[0][1690]=1689 +.[0][1691]=1690 +.[0][1692]=1691 +.[0][1693]=1692 +.[0][1694]=1693 +.[0][1695]=1694 +.[0][1696]=1695 +.[0][1697]=1696 +.[0][1698]=1697 +.[0][1699]=1698 +.[0][1700]=1699 +.[0][1701]=1700 +.[0][1702]=1701 +.[0][1703]=1702 +.[0][1704]=1703 +.[0][1705]=1704 +.[0][1706]=1705 +.[0][1707]=1706 +.[0][1708]=1707 +.[0][1709]=1708 +.[0][1710]=1709 +.[0][1711]=1710 +.[0][1712]=1711 +.[0][1713]=1712 +.[0][1714]=1713 +.[0][1715]=1714 +.[0][1716]=1715 +.[0][1717]=1716 +.[0][1718]=1717 +.[0][1719]=1718 +.[0][1720]=1719 +.[0][1721]=1720 +.[0][1722]=1721 +.[0][1723]=1722 +.[0][1724]=1723 +.[0][1725]=1724 +.[0][1726]=1725 +.[0][1727]=1726 +.[0][1728]=1727 +.[0][1729]=1728 +.[0][1730]=1729 +.[0][1731]=1730 +.[0][1732]=1731 +.[0][1733]=1732 +.[0][1734]=1733 +.[0][1735]=1734 +.[0][1736]=1735 +.[0][1737]=1736 
+.[0][1738]=1737 +.[0][1739]=1738 +.[0][1740]=1739 +.[0][1741]=1740 +.[0][1742]=1741 +.[0][1743]=1742 +.[0][1744]=1743 +.[0][1745]=1744 +.[0][1746]=1745 +.[0][1747]=1746 +.[0][1748]=1747 +.[0][1749]=1748 +.[0][1750]=1749 +.[0][1751]=1750 +.[0][1752]=1751 +.[0][1753]=1752 +.[0][1754]=1753 +.[0][1755]=1754 +.[0][1756]=1755 +.[0][1757]=1756 +.[0][1758]=1757 +.[0][1759]=1758 +.[0][1760]=1759 +.[0][1761]=1760 +.[0][1762]=1761 +.[0][1763]=1762 +.[0][1764]=1763 +.[0][1765]=1764 +.[0][1766]=1765 +.[0][1767]=1766 +.[0][1768]=1767 +.[0][1769]=1768 +.[0][1770]=1769 +.[0][1771]=1770 +.[0][1772]=1771 +.[0][1773]=1772 +.[0][1774]=1773 +.[0][1775]=1774 +.[0][1776]=1775 +.[0][1777]=1776 +.[0][1778]=1777 +.[0][1779]=1778 +.[0][1780]=1779 +.[0][1781]=1780 +.[0][1782]=1781 +.[0][1783]=1782 +.[0][1784]=1783 +.[0][1785]=1784 +.[0][1786]=1785 +.[0][1787]=1786 +.[0][1788]=1787 +.[0][1789]=1788 +.[0][1790]=1789 +.[0][1791]=1790 +.[0][1792]=1791 +.[0][1793]=1792 +.[0][1794]=1793 +.[0][1795]=1794 +.[0][1796]=1795 +.[0][1797]=1796 +.[0][1798]=1797 +.[0][1799]=1798 +.[0][1800]=1799 +.[0][1801]=1800 +.[0][1802]=1801 +.[0][1803]=1802 +.[0][1804]=1803 +.[0][1805]=1804 +.[0][1806]=1805 +.[0][1807]=1806 +.[0][1808]=1807 +.[0][1809]=1808 +.[0][1810]=1809 +.[0][1811]=1810 +.[0][1812]=1811 +.[0][1813]=1812 +.[0][1814]=1813 +.[0][1815]=1814 +.[0][1816]=1815 +.[0][1817]=1816 +.[0][1818]=1817 +.[0][1819]=1818 +.[0][1820]=1819 +.[0][1821]=1820 +.[0][1822]=1821 +.[0][1823]=1822 +.[0][1824]=1823 +.[0][1825]=1824 +.[0][1826]=1825 +.[0][1827]=1826 +.[0][1828]=1827 +.[0][1829]=1828 +.[0][1830]=1829 +.[0][1831]=1830 +.[0][1832]=1831 +.[0][1833]=1832 +.[0][1834]=1833 +.[0][1835]=1834 +.[0][1836]=1835 +.[0][1837]=1836 +.[0][1838]=1837 +.[0][1839]=1838 +.[0][1840]=1839 +.[0][1841]=1840 +.[0][1842]=1841 +.[0][1843]=1842 +.[0][1844]=1843 +.[0][1845]=1844 +.[0][1846]=1845 +.[0][1847]=1846 +.[0][1848]=1847 +.[0][1849]=1848 +.[0][1850]=1849 +.[0][1851]=1850 +.[0][1852]=1851 +.[0][1853]=1852 +.[0][1854]=1853 +.[0][1855]=1854 +.[0][1856]=1855 +.[0][1857]=1856 +.[0][1858]=1857 +.[0][1859]=1858 +.[0][1860]=1859 +.[0][1861]=1860 +.[0][1862]=1861 +.[0][1863]=1862 +.[0][1864]=1863 +.[0][1865]=1864 +.[0][1866]=1865 +.[0][1867]=1866 +.[0][1868]=1867 +.[0][1869]=1868 +.[0][1870]=1869 +.[0][1871]=1870 +.[0][1872]=1871 +.[0][1873]=1872 +.[0][1874]=1873 +.[0][1875]=1874 +.[0][1876]=1875 +.[0][1877]=1876 +.[0][1878]=1877 +.[0][1879]=1878 +.[0][1880]=1879 +.[0][1881]=1880 +.[0][1882]=1881 +.[0][1883]=1882 +.[0][1884]=1883 +.[0][1885]=1884 +.[0][1886]=1885 +.[0][1887]=1886 +.[0][1888]=1887 +.[0][1889]=1888 +.[0][1890]=1889 +.[0][1891]=1890 +.[0][1892]=1891 +.[0][1893]=1892 +.[0][1894]=1893 +.[0][1895]=1894 +.[0][1896]=1895 +.[0][1897]=1896 +.[0][1898]=1897 +.[0][1899]=1898 +.[0][1900]=1899 +.[0][1901]=1900 +.[0][1902]=1901 +.[0][1903]=1902 +.[0][1904]=1903 +.[0][1905]=1904 +.[0][1906]=1905 +.[0][1907]=1906 +.[0][1908]=1907 +.[0][1909]=1908 +.[0][1910]=1909 +.[0][1911]=1910 +.[0][1912]=1911 +.[0][1913]=1912 +.[0][1914]=1913 +.[0][1915]=1914 +.[0][1916]=1915 +.[0][1917]=1916 +.[0][1918]=1917 +.[0][1919]=1918 +.[0][1920]=1919 +.[0][1921]=1920 +.[0][1922]=1921 +.[0][1923]=1922 +.[0][1924]=1923 +.[0][1925]=1924 +.[0][1926]=1925 +.[0][1927]=1926 +.[0][1928]=1927 +.[0][1929]=1928 +.[0][1930]=1929 +.[0][1931]=1930 +.[0][1932]=1931 +.[0][1933]=1932 +.[0][1934]=1933 +.[0][1935]=1934 +.[0][1936]=1935 +.[0][1937]=1936 +.[0][1938]=1937 +.[0][1939]=1938 +.[0][1940]=1939 +.[0][1941]=1940 +.[0][1942]=1941 +.[0][1943]=1942 +.[0][1944]=1943 +.[0][1945]=1944 +.[0][1946]=1945 
+.[0][1947]=1946 +.[0][1948]=1947 +.[0][1949]=1948 +.[0][1950]=1949 +.[0][1951]=1950 +.[0][1952]=1951 +.[0][1953]=1952 +.[0][1954]=1953 +.[0][1955]=1954 +.[0][1956]=1955 +.[0][1957]=1956 +.[0][1958]=1957 +.[0][1959]=1958 +.[0][1960]=1959 +.[0][1961]=1960 +.[0][1962]=1961 +.[0][1963]=1962 +.[0][1964]=1963 +.[0][1965]=1964 +.[0][1966]=1965 +.[0][1967]=1966 +.[0][1968]=1967 +.[0][1969]=1968 +.[0][1970]=1969 +.[0][1971]=1970 +.[0][1972]=1971 +.[0][1973]=1972 +.[0][1974]=1973 +.[0][1975]=1974 +.[0][1976]=1975 +.[0][1977]=1976 +.[0][1978]=1977 +.[0][1979]=1978 +.[0][1980]=1979 +.[0][1981]=1980 +.[0][1982]=1981 +.[0][1983]=1982 +.[0][1984]=1983 +.[0][1985]=1984 +.[0][1986]=1985 +.[0][1987]=1986 +.[0][1988]=1987 +.[0][1989]=1988 +.[0][1990]=1989 +.[0][1991]=1990 +.[0][1992]=1991 +.[0][1993]=1992 +.[0][1994]=1993 +.[0][1995]=1994 +.[0][1996]=1995 +.[0][1997]=1996 +.[0][1998]=1997 +.[0][1999]=1998 +.[0][2000]=1999 +.[0][2001]=2000 +.[0][2002]=2001 +.[0][2003]=2002 +.[0][2004]=2003 +.[0][2005]=2004 +.[0][2006]=2005 +.[0][2007]=2006 +.[0][2008]=2007 +.[0][2009]=2008 +.[0][2010]=2009 +.[0][2011]=2010 +.[0][2012]=2011 +.[0][2013]=2012 +.[0][2014]=2013 +.[0][2015]=2014 +.[0][2016]=2015 +.[0][2017]=2016 +.[0][2018]=2017 +.[0][2019]=2018 +.[0][2020]=2019 +.[0][2021]=2020 +.[0][2022]=2021 +.[0][2023]=2022 +.[0][2024]=2023 +.[0][2025]=2024 +.[0][2026]=2025 +.[0][2027]=2026 +.[0][2028]=2027 +.[0][2029]=2028 +.[0][2030]=2029 +.[0][2031]=2030 +.[0][2032]=2031 +.[0][2033]=2032 +.[0][2034]=2033 +.[0][2035]=2034 +.[0][2036]=2035 +.[0][2037]=2036 +.[0][2038]=2037 +.[0][2039]=2038 +.[0][2040]=2039 +.[0][2041]=2040 +.[0][2042]=2041 +.[0][2043]=2042 +.[0][2044]=2043 +.[0][2045]=2044 +.[0][2046]=2045 +.[0][2047]=2046 +.[0][2048]=2047 +.[0][2049]=2048 +.[0][2050]=2049 +.[0][2051]=2050 +.[0][2052]=2051 +.[0][2053]=2052 +.[0][2054]=2053 +.[0][2055]=2054 +.[0][2056]=2055 +.[0][2057]=2056 +.[0][2058]=2057 +.[0][2059]=2058 +.[0][2060]=2059 +.[0][2061]=2060 +.[0][2062]=2061 +.[0][2063]=2062 +.[0][2064]=2063 +.[0][2065]=2064 +.[0][2066]=2065 +.[0][2067]=2066 +.[0][2068]=2067 +.[0][2069]=2068 +.[0][2070]=2069 +.[0][2071]=2070 +.[0][2072]=2071 +.[0][2073]=2072 +.[0][2074]=2073 +.[0][2075]=2074 +.[0][2076]=2075 +.[0][2077]=2076 +.[0][2078]=2077 +.[0][2079]=2078 +.[0][2080]=2079 +.[0][2081]=2080 +.[0][2082]=2081 +.[0][2083]=2082 +.[0][2084]=2083 +.[0][2085]=2084 +.[0][2086]=2085 +.[0][2087]=2086 +.[0][2088]=2087 +.[0][2089]=2088 +.[0][2090]=2089 +.[0][2091]=2090 +.[0][2092]=2091 +.[0][2093]=2092 +.[0][2094]=2093 +.[0][2095]=2094 +.[0][2096]=2095 +.[0][2097]=2096 +.[0][2098]=2097 +.[0][2099]=2098 +.[0][2100]=2099 +.[0][2101]=2100 +.[0][2102]=2101 +.[0][2103]=2102 +.[0][2104]=2103 +.[0][2105]=2104 +.[0][2106]=2105 +.[0][2107]=2106 +.[0][2108]=2107 +.[0][2109]=2108 +.[0][2110]=2109 +.[0][2111]=2110 +.[0][2112]=2111 +.[0][2113]=2112 +.[0][2114]=2113 +.[0][2115]=2114 +.[0][2116]=2115 +.[0][2117]=2116 +.[0][2118]=2117 +.[0][2119]=2118 diff --git a/trunk/jsoncpp/test/data/test_array_07.json b/trunk/jsoncpp/test/data/test_array_07.json new file mode 100644 index 0000000..e4ab4cd --- /dev/null +++ b/trunk/jsoncpp/test/data/test_array_07.json @@ -0,0 +1,2 @@ 
+[["A",0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,570,571,572,573,574,575,576,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,599,600,601,602,603,604,605,606,607,608,609,610,611,612,613,614,615,616,617,618,619,620,621,622,623,624,625,626,627,628,629,630,631,632,633,634,635,636,637,638,639,640,641,642,643,644,645,646,647,648,649,650,651,652,653,654,655,656,657,658,659,660,661,662,663,664,665,666,667,668,669,670,671,672,673,674,675,676,677,678,679,680,681,682,683,684,685,686,687,688,689,690,691,692,693,694,695,696,697,698,699,700,701,702,703,704,705,706,707,708,709,710,711,712,713,714,715,716,717,718,719,720,721,722,723,724,725,726,727,728,729,730,731,732,733,734,735,736,737,738,739,740,741,742,743,744,745,746,747,748,749,750,751,752,753,754,755,756,757,758,759,760,761,762,763,764,765,766,767,768,769,770,771,772,773,774,775,776,777,778,779,780,781,782,783,784,785,786,787,788,789,790,791,792,793,794,795,796,797,798,799,800,801,802,803,804,805,806,807,808,809,810,811,812,813,814,815,816,817,818,819,820,821,822,823,824,825,826,827,828,829,830,831,832,833,834,835,836,837,838,839,840,841,842,843,844,845,846,847,848,849,850,851,852,853,854,855,856,857,858,859,860,861,862,863,864,865,866,867,868,869,870,871,872,873,874,875,876,877,878,879,880,881,882,883,884,885,886,887,888,889,890,891,892,893,894,895,896,897,898,899,900,901,902,903,904,905,906,907,908,909,910,911,912,913,91
4,915,916,917,918,919,920,921,922,923,924,925,926,927,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,945,946,947,948,949,950,951,952,953,954,955,956,957,958,959,960,961,962,963,964,965,966,967,968,969,970,971,972,973,974,975,976,977,978,979,980,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1000,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1022,1023,1024,1025,1026,1027,1028,1029,1030,1031,1032,1033,1034,1035,1036,1037,1038,1039,1040,1041,1042,1043,1044,1045,1046,1047,1048,1049,1050,1051,1052,1053,1054,1055,1056,1057,1058,1059,1060,1061,1062,1063,1064,1065,1066,1067,1068,1069,1070,1071,1072,1073,1074,1075,1076,1077,1078,1079,1080,1081,1082,1083,1084,1085,1086,1087,1088,1089,1090,1091,1092,1093,1094,1095,1096,1097,1098,1099,1100,1101,1102,1103,1104,1105,1106,1107,1108,1109,1110,1111,1112,1113,1114,1115,1116,1117,1118,1119,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1156,1157,1158,1159,1160,1161,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1330,1331,1332,1333,1334,1335,1336,1337,1338,1339,1340,1341,1342,1343,1344,1345,1346,1347,1348,1349,1350,1351,1352,1353,1354,1355,1356,1357,1358,1359,1360,1361,1362,1363,1364,1365,1366,1367,1368,1369,1370,1371,1372,1373,1374,1375,1376,1377,1378,1379,1380,1381,1382,1383,1384,1385,1386,1387,1388,1389,1390,1391,1392,1393,1394,1395,1396,1397,1398,1399,1400,1401,1402,1403,1404,1405,1406,1407,1408,1409,1410,1411,1412,1413,1414,1415,1416,1417,1418,1419,1420,1421,1422,1423,1424,1425,1426,1427,1428,1429,1430,1431,1432,1433,1434,1435,1436,1437,1438,1439,1440,1441,1442,1443,1444,1445,1446,1447,1448,1449,1450,1451,1452,1453,1454,1455,1456,1457,1458,1459,1460,1461,1462,1463,1464,1465,1466,1467,1468,1469,1470,1471,1472,1473,1474,1475,1476,1477,1478,1479,1480,1481,1482,1483,1484,1485,1486,1487,1488,1489,1490,1491,1492,1493,1494,1495,1496,1497,1498,1499,1500,1501,1502,1503,1504,1505,1506,1507,1508,1509,1510,1511,1512,1513,1514,1515,1516,1517,1518,1519,1520,1521,1522,1523,1524,1525,1526,1527,1528,1529,1530,1531,1532,1533,1534,1535,1536,1537,1538,1539,1540,1541,1542,1543,1544,1545,1546,1547,1548,1549,1550,1551,1552,1553,1554,1555,1556,1557,1558,1559,1560,1561,1562,1563,1564,1565,1566,1567,1568,1569,1570,1571,1572,1573,1574,1575,1576,1577,1578,1579,1580,1581,1582,1583,1584,1585,1586,1587,1588,1589,1590,1591,1592,1593,1594,1595,1596,1597,1598,1599,1600,1601,1602,1603,1604,1605,1606,1607,1608,1609,1610,1611,1612,1613,1614,1615,1616,1617,1618,1619,1620,1621,1622,1623,1624,1625,1626,1627,1628,1629,1630,1631,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,164
2,1643,1644,1645,1646,1647,1648,1649,1650,1651,1652,1653,1654,1655,1656,1657,1658,1659,1660,1661,1662,1663,1664,1665,1666,1667,1668,1669,1670,1671,1672,1673,1674,1675,1676,1677,1678,1679,1680,1681,1682,1683,1684,1685,1686,1687,1688,1689,1690,1691,1692,1693,1694,1695,1696,1697,1698,1699,1700,1701,1702,1703,1704,1705,1706,1707,1708,1709,1710,1711,1712,1713,1714,1715,1716,1717,1718,1719,1720,1721,1722,1723,1724,1725,1726,1727,1728,1729,1730,1731,1732,1733,1734,1735,1736,1737,1738,1739,1740,1741,1742,1743,1744,1745,1746,1747,1748,1749,1750,1751,1752,1753,1754,1755,1756,1757,1758,1759,1760,1761,1762,1763,1764,1765,1766,1767,1768,1769,1770,1771,1772,1773,1774,1775,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1787,1788,1789,1790,1791,1792,1793,1794,1795,1796,1797,1798,1799,1800,1801,1802,1803,1804,1805,1806,1807,1808,1809,1810,1811,1812,1813,1814,1815,1816,1817,1818,1819,1820,1821,1822,1823,1824,1825,1826,1827,1828,1829,1830,1831,1832,1833,1834,1835,1836,1837,1838,1839,1840,1841,1842,1843,1844,1845,1846,1847,1848,1849,1850,1851,1852,1853,1854,1855,1856,1857,1858,1859,1860,1861,1862,1863,1864,1865,1866,1867,1868,1869,1870,1871,1872,1873,1874,1875,1876,1877,1878,1879,1880,1881,1882,1883,1884,1885,1886,1887,1888,1889,1890,1891,1892,1893,1894,1895,1896,1897,1898,1899,1900,1901,1902,1903,1904,1905,1906,1907,1908,1909,1910,1911,1912,1913,1914,1915,1916,1917,1918,1919,1920,1921,1922,1923,1924,1925,1926,1927,1928,1929,1930,1931,1932,1933,1934,1935,1936,1937,1938,1939,1940,1941,1942,1943,1944,1945,1946,1947,1948,1949,1950,1951,1952,1953,1954,1955,1956,1957,1958,1959,1960,1961,1962,1963,1964,1965,1966,1967,1968,1969,1970,1971,1972,1973,1974,1975,1976,1977,1978,1979,1980,1981,1982,1983,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,1995,1996,1997,1998,1999,2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020,2021,2022,2023,2024,2025,2026,2027,2028,2029,2030,2031,2032,2033,2034,2035,2036,2037,2038,2039,2040,2041,2042,2043,2044,2045,2046,2047,2048,2049,2050,2051,2052,2053,2054,2055,2056,2057,2058,2059,2060,2061,2062,2063,2064,2065,2066,2067,2068,2069,2070,2071,2072,2073,2074,2075,2076,2077,2078,2079,2080,2081,2082,2083,2084,2085,2086,2087,2088,2089,2090,2091,2092,2093,2094,2095,2096,2097,2098,2099,2100,2101,2102,2103,2104,2105,2106,2107,2108,2109,2110,2111,2112,2113,2114,2115,2116,2117,2118] +] \ No newline at end of file diff --git a/trunk/jsoncpp/test/data/test_string_04.expected b/trunk/jsoncpp/test/data/test_string_04.expected new file mode 100644 index 0000000..f57d525 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_04.expected @@ -0,0 +1,2 @@ +.=""abc\def"" + diff --git a/trunk/jsoncpp/test/data/test_string_04.json b/trunk/jsoncpp/test/data/test_string_04.json new file mode 100644 index 0000000..01fe752 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_04.json @@ -0,0 +1,2 @@ +"\"abc\\def\"" + diff --git a/trunk/jsoncpp/test/data/test_string_05.expected b/trunk/jsoncpp/test/data/test_string_05.expected new file mode 100644 index 0000000..9794ddd --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_05.expected @@ -0,0 +1,2 @@ +.="\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\" + diff --git a/trunk/jsoncpp/test/data/test_string_05.json b/trunk/jsoncpp/test/data/test_string_05.json new file mode 100644 index 0000000..e156024 --- /dev/null +++ b/trunk/jsoncpp/test/data/test_string_05.json @@ -0,0 +1,2 @@ +"\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\" + From 
2e45954872106cbf5f6e80d65af27f0ab9cd48f4 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 26 May 2011 06:58:52 +0000 Subject: [PATCH 230/268] Fixed some test bugs that show up when 64-bit mode is disabled. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@230 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 14f49c1..d31d28c 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -697,7 +697,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(double(kint64max), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kint64max), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("9.22337204e18", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("9.22337e+18", val.asString()); // int64 min val = Json::Value(double(kint64min)); @@ -716,7 +716,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(double(kint64min), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kint64min), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("-9.22337204e18", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("-9.22337e+18", val.asString()); // uint64 max val = Json::Value(double(kuint64max)); @@ -735,7 +735,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(double(kuint64max), val.asDouble()); JSONTEST_ASSERT_EQUAL(float(kuint64max), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.84467441e19", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("1.84467e+19", val.asString()); #else // ifdef JSON_NO_INT64 // 2^40 (signed constructor arg) val = Json::Value(1LL << 40); From f97088641ddc1bc3d52afd03752f29ea33cfda47 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 26 May 2011 07:32:36 +0000 Subject: [PATCH 231/268] - Fixed unit test compilation on MSVS 2003, 2005 and 2008. - Worked-around unit test failure with MSVS* by "forcing" all floating-point numbers to be loaded from memory instead of FPU registers. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@231 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/main.cpp | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index d31d28c..b38bd78 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -3,7 +3,6 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE -#include #include #include @@ -11,12 +10,19 @@ #include "jsontest.h" // Make numeric limits more convenient to talk about. -#define kint32max std::numeric_limits::max() -#define kint32min std::numeric_limits::min() -#define kuint32max std::numeric_limits::max() -#define kint64max std::numeric_limits::max() -#define kint64min std::numeric_limits::min() -#define kuint64max std::numeric_limits::max() +// Assumes int type in 32 bits. 
+#define kint32max std::numeric_limits::max() +#define kint32min std::numeric_limits::min() +#define kuint32max std::numeric_limits::max() +#define kint64max std::numeric_limits::max() +#define kint64min std::numeric_limits::min() +#define kuint64max std::numeric_limits::max() + +static const double kdint64max = double(kint64max); +static const float kfint64max = float(kint64max); +static const float kfint32max = float(kint32max); +static const float kfuint32max = float(kuint32max); + // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// @@ -623,7 +629,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(kint32max, val.asUInt()); JSONTEST_ASSERT_EQUAL(kint32max, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(kint32max, val.asDouble()); - JSONTEST_ASSERT_EQUAL(kint32max, val.asFloat()); + JSONTEST_ASSERT_EQUAL(kfint32max, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_STRING_EQUAL("2147483647", val.asString()); @@ -675,7 +681,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(kuint32max, val.asUInt()); JSONTEST_ASSERT_EQUAL(kuint32max, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(kuint32max, val.asDouble()); - JSONTEST_ASSERT_EQUAL(kuint32max, val.asFloat()); + JSONTEST_ASSERT_EQUAL(kfuint32max, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_STRING_EQUAL("4294967295", val.asString()); From f8ec6167831c7daabaaa49b745d90f6648008d81 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 26 May 2011 17:14:26 +0000 Subject: [PATCH 232/268] Fixed unit test failure on IBM AIX xlC by hard-coding the maxUInt64AsDouble as double constant instead of relying on double(Value::maxUInt64) which produces an incorrect value. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@232 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 98ce606..79478b8 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -35,6 +35,10 @@ const UInt Value::maxUInt = UInt(-1); const Int64 Value::minInt64 = Int64( ~(UInt64(-1)/2) ); const Int64 Value::maxInt64 = Int64( UInt64(-1)/2 ); const UInt64 Value::maxUInt64 = UInt64(-1); +// The constant is hard-coded because some compiler have trouble +// converting Value::maxUInt64 to a double correctly (AIX/xlC). +// Assumes that UInt64 is a 64 bits integer. +static const double maxUInt64AsDouble = 18446744073709551615.0; #endif // defined(JSON_HAS_INT64) const LargestInt Value::minLargestInt = LargestInt( ~(LargestUInt(-1)/2) ); const LargestInt Value::maxLargestInt = LargestInt( LargestUInt(-1)/2 ); @@ -1443,7 +1447,7 @@ Value::isUInt64() const // double, so double(maxUInt64) will be rounded up to 2^64. Therefore we // require the value to be strictly less than the limit. return value_.real_ >= 0 && - value_.real_ < double(maxUInt64) && + value_.real_ < maxUInt64AsDouble && IsIntegral(value_.real_); default: break; From 5503411b322dd8f6951a22dd7c52ea5d46f9c3db Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 26 May 2011 20:14:32 +0000 Subject: [PATCH 233/268] Fixed MSVS 2003, 2005 and 2008 tests execution by normalizing floating-point string representation using helper normalizeFloatingPointStr(). 
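A likely reason this normalization is needed: the Microsoft CRTs behind MSVS 2003/2005/2008 print floating-point exponents with three digits (e.g. "1.04858e+006"), while most other runtimes print two, so literal expected strings cannot match on every platform. The sketch below shows the same strip-leading-exponent-zeros idea as a small self-contained program; stripExponentZeros is an illustrative name, not the helper this patch actually adds (that one appears in the diff that follows).

    #include <cassert>
    #include <string>

    // Illustrative sketch: reduce "1.04858e+006" and "1.04858e+06" to the
    // common form "1.04858e+6" before comparing strings across compilers.
    static std::string stripExponentZeros( const std::string &s )
    {
       std::string::size_type e = s.find_last_of( "eE" );
       if ( e == std::string::npos )
          return s;                                // no exponent at all
       std::string::size_type digits = e + 1;
       if ( digits < s.size()  &&  (s[digits] == '+' || s[digits] == '-') )
          ++digits;                                // keep an explicit sign
       std::string::size_type firstNonZero = s.find_first_not_of( '0', digits );
       std::string exponent = (firstNonZero == std::string::npos)
                                 ? "0"             // exponent was all zeros
                                 : s.substr( firstNonZero );
       return s.substr( 0, digits ) + exponent;
    }

    int main()
    {
       assert( stripExponentZeros( "1.04858e+006" ) == "1.04858e+6" );
       assert( stripExponentZeros( "1234e-010" ) == "1234e-10" );
       assert( stripExponentZeros( "0.0" ) == "0.0" );
       return 0;
    }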
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@233 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/jsontestrunner/main.cpp | 28 ++++++++++- trunk/jsoncpp/src/test_lib_json/main.cpp | 59 ++++++++++++++++++++--- 2 files changed, 77 insertions(+), 10 deletions(-) diff --git a/trunk/jsoncpp/src/jsontestrunner/main.cpp b/trunk/jsoncpp/src/jsontestrunner/main.cpp index dfb6150..74f0216 100644 --- a/trunk/jsoncpp/src/jsontestrunner/main.cpp +++ b/trunk/jsoncpp/src/jsontestrunner/main.cpp @@ -15,6 +15,31 @@ # pragma warning( disable: 4996 ) // disable fopen deprecation warning #endif +static std::string +normalizeFloatingPointStr( double value ) +{ + char buffer[32]; + sprintf( buffer, "%.16g", value ); + buffer[sizeof(buffer)-1] = 0; + std::string s( buffer ); + std::string::size_type index = s.find_last_of( "eE" ); + if ( index != std::string::npos ) + { + std::string::size_type hasSign = (s[index+1] == '+' || s[index+1] == '-') ? 1 : 0; + std::string::size_type exponentStartIndex = index + 1 + hasSign; + std::string normalized = s.substr( 0, exponentStartIndex ); + std::string::size_type indexDigit = s.find_first_not_of( '0', exponentStartIndex ); + std::string exponent = "0"; + if ( indexDigit != std::string::npos ) // There is an exponent different from 0 + { + exponent = s.substr( indexDigit ); + } + return normalized + exponent; + } + return s; +} + + static std::string readInputTestFile( const char *path ) { @@ -34,7 +59,6 @@ readInputTestFile( const char *path ) return text; } - static void printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) { @@ -50,7 +74,7 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." ) fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() ); break; case Json::realValue: - fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() ); + fprintf( fout, "%s=%s\n", path.c_str(), normalizeFloatingPointStr(value.asDouble()).c_str() ); break; case Json::stringValue: fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() ); diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index b38bd78..a195542 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -97,9 +97,51 @@ struct ValueTest : JsonTest::TestCase void checkIsLess( const Json::Value &x, const Json::Value &y ); void checkIsEqual( const Json::Value &x, const Json::Value &y ); + + /// Normalize the representation of floating-point number by stripped leading 0 in exponent. + static std::string normalizeFloatingPointStr( const std::string &s ); }; +std::string +ValueTest::normalizeFloatingPointStr( const std::string &s ) +{ + std::string::size_type index = s.find_last_of( "eE" ); + if ( index != std::string::npos ) + { + std::string::size_type hasSign = (s[index+1] == '+' || s[index+1] == '-') ? 
1 : 0; + std::string::size_type exponentStartIndex = index + 1 + hasSign; + std::string normalized = s.substr( 0, exponentStartIndex ); + std::string::size_type indexDigit = s.find_first_not_of( '0', exponentStartIndex ); + std::string exponent = "0"; + if ( indexDigit != std::string::npos ) // There is an exponent different from 0 + { + exponent = s.substr( indexDigit ); + } + return normalized + exponent; + } + return s; +} + + +JSONTEST_FIXTURE( ValueTest, checkNormalizeFloatingPointStr ) +{ + JSONTEST_ASSERT_STRING_EQUAL( "0.0", normalizeFloatingPointStr("0.0") ); + JSONTEST_ASSERT_STRING_EQUAL( "0e0", normalizeFloatingPointStr("0e0") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234.0", normalizeFloatingPointStr("1234.0") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234.0e0", normalizeFloatingPointStr("1234.0e0") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234.0e+0", normalizeFloatingPointStr("1234.0e+0") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e-1", normalizeFloatingPointStr("1234e-1") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e10", normalizeFloatingPointStr("1234e10") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e10", normalizeFloatingPointStr("1234e010") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e+10", normalizeFloatingPointStr("1234e+010") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e-10", normalizeFloatingPointStr("1234e-010") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e+100", normalizeFloatingPointStr("1234e+100") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e-100", normalizeFloatingPointStr("1234e-100") ); + JSONTEST_ASSERT_STRING_EQUAL( "1234e+1", normalizeFloatingPointStr("1234e+001") ); +} + + JSONTEST_FIXTURE( ValueTest, memberCount ) { JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) ); @@ -579,7 +621,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.04858e+06", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("1.04858e+6", normalizeFloatingPointStr(val.asString())); // -2^20 val = Json::Value(-(1 << 20)); @@ -819,7 +861,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.09951e+12", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("1.09951e+12", normalizeFloatingPointStr(val.asString())); // -2^40 val = Json::Value(-(1LL << 40)); @@ -892,7 +934,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asDouble()); JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("9.22337e+18", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("9.22337e+18", normalizeFloatingPointStr(val.asString())); // int64 min val = Json::Value(Json::Int64(kint64min)); @@ -939,7 +981,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("-9.22337e+18", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("-9.22337e+18", normalizeFloatingPointStr(val.asString())); // uint64 max val = Json::Value(Json::UInt64(kuint64max)); @@ -982,7 +1024,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asFloat()); 
JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.84467e+19", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("1.84467e+19", normalizeFloatingPointStr(val.asString())); #endif } @@ -1073,7 +1115,7 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(2147483647U, val.asLargestUInt()); #endif JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("2.14748e+09", val.asString()); + JSONTEST_ASSERT_EQUAL("2.14748e+9", normalizeFloatingPointStr(val.asString())); // A bit under int32 min val = Json::Value(kint32min - 0.5); @@ -1100,7 +1142,7 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(-2147483648LL, val.asLargestInt()); #endif JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("-2.14748e+09", val.asString()); + JSONTEST_ASSERT_EQUAL("-2.14748e+9", normalizeFloatingPointStr(val.asString())); // A bit over uint32 max val = Json::Value(kuint32max + 0.5); @@ -1128,7 +1170,7 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(4294967295ULL, val.asLargestUInt()); #endif JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("4.29497e+09", val.asString()); + JSONTEST_ASSERT_EQUAL("4.29497e+9", normalizeFloatingPointStr(val.asString())); } @@ -1351,6 +1393,7 @@ ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) int main( int argc, const char *argv[] ) { JsonTest::Runner runner; + JSONTEST_REGISTER_FIXTURE( runner, ValueTest, checkNormalizeFloatingPointStr ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, memberCount ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, objects ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, arrays ); From 628440352146ab2858280498b151c5c8d165d571 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 26 May 2011 22:55:24 +0000 Subject: [PATCH 234/268] Fixed compilation issues with MSVC 6: replace usage of ostringstream with valueToString to support 64 bits integer and high precision floating point conversion to string. Replace usage of ULL and LL literal with UInt64(expr) and Int64(expr). Introduced helper function uint64ToDouble() to work-around missing conversion. Unit tests do not pass yet. 
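The "missing conversion" mentioned above is the unsigned 64-bit integer to double cast, which MSVC 6 lacks; the work-around converts the high and low 32-bit halves separately and recombines them. A rough self-contained sketch of that technique follows; u64ToDouble and the typedefs are illustrative stand-ins, not the uint64ToDouble/integerToDouble helpers added in the diff below.

    #include <cassert>
    #include <cstdio>

    typedef unsigned long long U64;   // stand-in for Json::UInt64
    typedef unsigned int       U32;   // stand-in for Json::UInt

    // Split the value into 32-bit words, convert each, and recombine as
    // high * 2^32 + low, avoiding a direct UInt64 -> double cast.
    static double u64ToDouble( U64 value )
    {
       return static_cast<double>( U32(value >> 32) ) * 4294967296.0
              + static_cast<double>( U32(value & 0xffffffff) );
    }

    int main()
    {
       assert( u64ToDouble( U64(1) << 40 ) == 1099511627776.0 );
       // 2^64 - 1 is not exactly representable; it rounds up to 2^64.
       std::printf( "%.0f\n", u64ToDouble( ~U64(0) ) );  // 18446744073709551616
       return 0;
    }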
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@234 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 35 ++++-- trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 17 +++ trunk/jsoncpp/src/test_lib_json/jsontest.h | 11 +- trunk/jsoncpp/src/test_lib_json/main.cpp | 115 ++++++++++--------- 4 files changed, 111 insertions(+), 67 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 79478b8..0149abd 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -48,10 +48,28 @@ const LargestUInt Value::maxLargestUInt = LargestUInt(-1); /// Unknown size marker static const unsigned int unknown = (unsigned)-1; +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +template +static inline bool InRange(double d, T min, U max) { + return d >= min && d <= max; +} +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +static inline double integerToDouble( Json::UInt64 value ) +{ + return static_cast( UInt(value >> 32) ) * (UInt64(1)<<32) + UInt(value & 0xffffffff); +} + +template +static inline double integerToDouble( T value ) +{ + return static_cast( value ); +} + template static inline bool InRange(double d, T min, U max) { - return d >= min && d <= max; + return d >= integerToDouble(min) && d <= integerToDouble(max); } +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) /** Duplicates the specified string value. @@ -673,9 +691,6 @@ Value::asCString() const std::string Value::asString() const { - // Let the STL sort it out for numeric types. - std::ostringstream oss; - switch ( type_ ) { case nullValue: @@ -685,18 +700,14 @@ Value::asString() const case booleanValue: return value_.bool_ ? "true" : "false"; case intValue: - oss << value_.int_; - break; + return valueToString( value_.int_ ); case uintValue: - oss << value_.uint_; - break; + return valueToString( value_.uint_ ); case realValue: - oss << value_.real_; - break; + return valueToString( value_.real_ ); default: JSON_FAIL_MESSAGE( "Type is not convertible to string" ); } - return oss.str(); } # ifdef JSON_USE_CPPTL @@ -842,7 +853,7 @@ Value::asDouble() const #if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) return static_cast( value_.uint_ ); #else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); + return integerToDouble( value_.uint_ ); #endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) case realValue: return value_.real_; diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp index 46f2eea..327d344 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -249,6 +249,23 @@ TestResult::addToLastFailure( const std::string &message ) return *this; } +TestResult & +TestResult::operator << ( Json::Int64 value ) { + return addToLastFailure( Json::valueToString(value) ); +} + + +TestResult & +TestResult::operator << ( Json::UInt64 value ) { + return addToLastFailure( Json::valueToString(value) ); +} + + +TestResult & +TestResult::operator << ( bool value ) { + return addToLastFailure(value ? 
"true" : "false"); +} + // class TestCase // ////////////////////////////////////////////////////////////////// diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 28792a9..207692b 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -8,6 +8,7 @@ # include # include +# include # include # include # include @@ -90,14 +91,17 @@ namespace JsonTest { template TestResult &operator << ( const T& value ) { std::ostringstream oss; + oss.precision( 16 ); + oss.setf( std::ios_base::floatfield ); oss << value; return addToLastFailure(oss.str()); } // Specialized versions. - TestResult &operator << ( bool value ) { - return addToLastFailure(value ? "true" : "false"); - } + TestResult &operator << ( bool value ); + // std:ostream does not support 64bits integers on all STL implementation + TestResult &operator << ( Json::Int64 value ); + TestResult &operator << ( Json::UInt64 value ); private: TestResult &addToLastFailure( const std::string &message ); @@ -195,6 +199,7 @@ namespace JsonTest { return result; } + TestResult & checkStringEqual( TestResult &result, const std::string &expected, const std::string &actual, diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index a195542..773bf10 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -30,6 +30,17 @@ static const float kfuint32max = float(kuint32max); // ////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////// +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +static inline double uint64ToDouble( Json::UInt64 value ) +{ + return static_cast( value ); +} +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +static inline double uint64ToDouble( Json::UInt64 value ) +{ + return static_cast( Json::UInt(value >> 32) ) * (Json::UInt64(1)<<32) + Json::UInt(value & 0xffffffff); +} +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) struct ValueTest : JsonTest::TestCase { @@ -453,7 +464,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(false, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("0.0", val.asString()); // Zero (signed constructor arg) val = Json::Value(0); @@ -537,13 +548,12 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(0.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(0.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(false, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("0", val.asString()); + JSONTEST_ASSERT_STRING_EQUAL("0.0", val.asString()); // 2^20 (signed constructor arg) val = Json::Value(1 << 20); JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); - checks = IsCheck(); checks.isInt_ = true; checks.isInt64_ = true; @@ -568,7 +578,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_STRING_EQUAL("1048576", val.asString()); // 2^20 (unsigned constructor arg) - val = Json::Value(1u << 20); + val = Json::Value(Json::UInt(1 << 20)); JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); @@ -621,7 +631,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL((1 << 20), val.asDouble()); JSONTEST_ASSERT_EQUAL((1 << 20), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.04858e+6", normalizeFloatingPointStr(val.asString())); + 
JSONTEST_ASSERT_STRING_EQUAL("1048576.0", normalizeFloatingPointStr(val.asString())); // -2^20 val = Json::Value(-(1 << 20)); @@ -786,7 +796,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_STRING_EQUAL("1.84467e+19", val.asString()); #else // ifdef JSON_NO_INT64 // 2^40 (signed constructor arg) - val = Json::Value(1LL << 40); + val = Json::Value(Json::Int64(1) << 40); JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); @@ -802,17 +812,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_STRING_EQUAL("1099511627776", val.asString()); // 2^40 (unsigned constructor arg) - val = Json::Value(1ULL << 40); + val = Json::Value(Json::UInt64(1) << 40); JSONTEST_ASSERT_EQUAL(Json::uintValue, val.type()); @@ -828,17 +838,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_STRING_EQUAL("1099511627776", val.asString()); // 2^40 (floating-point constructor arg) - val = Json::Value((1LL << 40) / 1.0); + val = Json::Value((Json::Int64(1) << 40) / 1.0); JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); @@ -854,17 +864,17 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asInt64()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestInt()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asUInt64()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asLargestUInt()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asDouble()); - JSONTEST_ASSERT_EQUAL((1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asUInt64()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), 
val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 40), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.09951e+12", normalizeFloatingPointStr(val.asString())); + JSONTEST_ASSERT_STRING_EQUAL("1099511627776.0", normalizeFloatingPointStr(val.asString())); // -2^40 - val = Json::Value(-(1LL << 40)); + val = Json::Value(-(Json::Int64(1) << 40)); JSONTEST_ASSERT_EQUAL(Json::intValue, val.type()); @@ -879,10 +889,10 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); - JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asInt64()); - JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asLargestInt()); - JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asDouble()); - JSONTEST_ASSERT_EQUAL(-(1LL << 40), val.asFloat()); + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asInt64()); + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asDouble()); + JSONTEST_ASSERT_EQUAL(-(Json::Int64(1) << 40), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_STRING_EQUAL("-1099511627776", val.asString()); @@ -929,12 +939,12 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); - JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asUInt64()); - JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asLargestUInt()); - JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asDouble()); - JSONTEST_ASSERT_EQUAL(9223372036854775808ULL, val.asFloat()); + JSONTEST_ASSERT_EQUAL(Json::UInt64(1) << 63, val.asUInt64()); + JSONTEST_ASSERT_EQUAL(Json::UInt64(1) << 63, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL(uint64ToDouble(Json::UInt64(1) << 63), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(uint64ToDouble(Json::UInt64(1) << 63)), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("9.22337e+18", normalizeFloatingPointStr(val.asString())); + JSONTEST_ASSERT_STRING_EQUAL("9.223372036854776e+18", normalizeFloatingPointStr(val.asString())); // int64 min val = Json::Value(Json::Int64(kint64min)); @@ -981,7 +991,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("-9.22337e+18", normalizeFloatingPointStr(val.asString())); + JSONTEST_ASSERT_STRING_EQUAL("-9.223372036854776e+18", normalizeFloatingPointStr(val.asString())); // uint64 max val = Json::Value(Json::UInt64(kuint64max)); @@ -1001,14 +1011,14 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(kuint64max, val.asUInt64()); JSONTEST_ASSERT_EQUAL(kuint64max, val.asLargestUInt()); - JSONTEST_ASSERT_EQUAL(double(kuint64max), val.asDouble()); - JSONTEST_ASSERT_EQUAL(float(kuint64max), val.asFloat()); + JSONTEST_ASSERT_EQUAL(uint64ToDouble(kuint64max), val.asDouble()); + JSONTEST_ASSERT_EQUAL(float(uint64ToDouble(kuint64max)), val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_STRING_EQUAL("18446744073709551615", val.asString()); // uint64 max (floating point constructor). Note that kuint64max is not // exactly representable as a double, and will be rounded up to be higher. 
- val = Json::Value(double(kuint64max)); + val = Json::Value(uint64ToDouble(kuint64max)); JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); @@ -1024,7 +1034,7 @@ JSONTEST_FIXTURE( ValueTest, integers ) JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_STRING_EQUAL("1.84467e+19", normalizeFloatingPointStr(val.asString())); + JSONTEST_ASSERT_STRING_EQUAL("1.844674407370955e+19", normalizeFloatingPointStr(val.asString())); #endif } @@ -1060,7 +1070,7 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(1, val.asUInt()); JSONTEST_ASSERT_EQUAL(1, val.asLargestUInt()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("1.5", val.asString()); + JSONTEST_ASSERT_EQUAL("1.50", val.asString()); // Small negative number val = Json::Value(-1.5); @@ -1086,7 +1096,7 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(-1, val.asInt()); JSONTEST_ASSERT_EQUAL(-1, val.asLargestInt()); JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("-1.5", val.asString()); + JSONTEST_ASSERT_EQUAL("-1.50", val.asString()); // A bit over int32 max val = Json::Value(kint32max + 0.5); @@ -1115,7 +1125,7 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(2147483647U, val.asLargestUInt()); #endif JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("2.14748e+9", normalizeFloatingPointStr(val.asString())); + JSONTEST_ASSERT_EQUAL("2147483647.50", normalizeFloatingPointStr(val.asString())); // A bit under int32 min val = Json::Value(kint32min - 0.5); @@ -1139,10 +1149,10 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(-2147483648.5, val.asDouble()); JSONTEST_ASSERT_EQUAL(float(-2147483648.5), val.asFloat()); #ifdef JSON_HAS_INT64 - JSONTEST_ASSERT_EQUAL(-2147483648LL, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL(-Json::Int64(1)<< 31, val.asLargestInt()); #endif JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("-2.14748e+9", normalizeFloatingPointStr(val.asString())); + JSONTEST_ASSERT_EQUAL("-2147483648.50", normalizeFloatingPointStr(val.asString())); // A bit over uint32 max val = Json::Value(kuint32max + 0.5); @@ -1166,11 +1176,14 @@ JSONTEST_FIXTURE( ValueTest, nonIntegers ) JSONTEST_ASSERT_EQUAL(4294967295.5, val.asDouble()); JSONTEST_ASSERT_EQUAL(float(4294967295.5), val.asFloat()); #ifdef JSON_HAS_INT64 - JSONTEST_ASSERT_EQUAL(4294967295LL, val.asLargestInt()); - JSONTEST_ASSERT_EQUAL(4294967295ULL, val.asLargestUInt()); + JSONTEST_ASSERT_EQUAL((Json::Int64(1) << 32)-1, val.asLargestInt()); + JSONTEST_ASSERT_EQUAL((Json::UInt64(1) << 32)-Json::UInt64(1), val.asLargestUInt()); #endif JSONTEST_ASSERT_EQUAL(true, val.asBool()); - JSONTEST_ASSERT_EQUAL("4.29497e+9", normalizeFloatingPointStr(val.asString())); + JSONTEST_ASSERT_EQUAL("4294967295.50", normalizeFloatingPointStr(val.asString())); + + val = Json::Value(1.2345678901234); + JSONTEST_ASSERT_STRING_EQUAL( "1.23456789012340", normalizeFloatingPointStr(val.asString())); } @@ -1243,7 +1256,6 @@ ValueTest::checkIs( const Json::Value &value, const IsCheck &check ) #endif } - JSONTEST_FIXTURE( ValueTest, compareNull ) { JSONTEST_ASSERT_PRED( checkIsEqual( Json::Value(), Json::Value() ) ); @@ -1389,7 +1401,6 @@ ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) JSONTEST_ASSERT( y.compare( x ) == 0 ); } - int main( int argc, const char *argv[] ) { JsonTest::Runner runner; From 
373c88a763fd4ef468f3683439ee85370721f9e5 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 27 May 2011 08:12:41 +0000 Subject: [PATCH 235/268] Fixed unit tests execution on MSVC 6 by removing usage of std::numeric_limits. It was returning 0 value in some max cases. Fixed Value::asFloat() to use integerToDouble(). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@235 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 4 ++-- trunk/jsoncpp/src/test_lib_json/main.cpp | 16 +++++++--------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 0149abd..b629987 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -56,7 +56,7 @@ static inline bool InRange(double d, T min, U max) { #else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) static inline double integerToDouble( Json::UInt64 value ) { - return static_cast( UInt(value >> 32) ) * (UInt64(1)<<32) + UInt(value & 0xffffffff); + return static_cast( Int64(value/2) ) * 2.0 + Int64(value & 1); } template @@ -878,7 +878,7 @@ Value::asFloat() const #if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) return static_cast( value_.uint_ ); #else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) - return static_cast( Int(value_.uint_/2) ) * 2 + Int(value_.uint_ & 1); + return integerToDouble( value_.uint_ ); #endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) case realValue: return static_cast( value_.real_ ); diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 773bf10..c6ab619 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -3,20 +3,18 @@ // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE -#include - #include #include #include "jsontest.h" // Make numeric limits more convenient to talk about. // Assumes int type in 32 bits. -#define kint32max std::numeric_limits::max() -#define kint32min std::numeric_limits::min() -#define kuint32max std::numeric_limits::max() -#define kint64max std::numeric_limits::max() -#define kint64min std::numeric_limits::min() -#define kuint64max std::numeric_limits::max() +#define kint32max Json::Value::maxInt +#define kint32min Json::Value::minInt +#define kuint32max Json::Value::maxUInt +#define kint64max Json::Value::maxInt64 +#define kint64min Json::Value::minInt64 +#define kuint64max Json::Value::maxUInt64 static const double kdint64max = double(kint64max); static const float kfint64max = float(kint64max); @@ -38,7 +36,7 @@ static inline double uint64ToDouble( Json::UInt64 value ) #else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) static inline double uint64ToDouble( Json::UInt64 value ) { - return static_cast( Json::UInt(value >> 32) ) * (Json::UInt64(1)<<32) + Json::UInt(value & 0xffffffff); + return static_cast( Json::Int64(value/2) ) * 2.0 + Json::Int64(value & 1); } #endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) From cc7f66117927d288fe344b2d77421f2b15fef40b Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Tue, 21 Jun 2011 20:34:40 +0000 Subject: [PATCH 236/268] Just testing whether I can still commit changes. I cannot tell my access-level from the sf project page. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@236 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 51a098a..88c1178 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -170,3 +170,4 @@ test_complex_01.process-output: jsontest.exe output, typically useful to See file LICENSE for details. Basically JsonCpp is licensed under MIT license, or public domain if desired and recognized in your jurisdiction. + From a239bb2c8e74bd19971622ef834e010201cef0db Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Tue, 21 Jun 2011 21:18:49 +0000 Subject: [PATCH 237/268] Switched CRLF to LF in repo, and added svn:eol-style native. I might have missed a few files though. Just committing what I have so far. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@237 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/amalgamate.py | 294 +++++++++++----------- trunk/jsoncpp/devtools/antglob.py | 182 +++++++------- trunk/jsoncpp/devtools/fixeol.py | 126 +++++----- trunk/jsoncpp/devtools/licenseupdater.py | 186 +++++++------- trunk/jsoncpp/devtools/tarball.py | 106 ++++---- trunk/jsoncpp/scons-tools/globtool.py | 104 ++++---- trunk/jsoncpp/test/jsonchecker/readme.txt | 6 +- trunk/jsoncpp/test/rununittests.py | 146 +++++------ 8 files changed, 575 insertions(+), 575 deletions(-) diff --git a/trunk/jsoncpp/amalgamate.py b/trunk/jsoncpp/amalgamate.py index 1476a5f..6e3b11a 100644 --- a/trunk/jsoncpp/amalgamate.py +++ b/trunk/jsoncpp/amalgamate.py @@ -1,147 +1,147 @@ -"""Amalgate json-cpp library sources into a single source and header file. - -Requires Python 2.6 - -Example of invocation (must be invoked from json-cpp top directory): -python amalgate.py -""" -import os -import os.path -import sys - -class AmalgamationFile: - def __init__( self, top_dir ): - self.top_dir = top_dir - self.blocks = [] - - def add_text( self, text ): - if not text.endswith( '\n' ): - text += '\n' - self.blocks.append( text ) - - def add_file( self, relative_input_path, wrap_in_comment=False ): - def add_marker( prefix ): - self.add_text( '' ) - self.add_text( '// ' + '/'*70 ) - self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) ) - self.add_text( '// ' + '/'*70 ) - self.add_text( '' ) - add_marker( 'Beginning' ) - f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) - content = f.read() - if wrap_in_comment: - content = '/*\n' + content + '\n*/' - self.add_text( content ) - f.close() - add_marker( 'End' ) - self.add_text( '\n\n\n\n' ) - - def get_value( self ): - return ''.join( self.blocks ).replace('\r\n','\n') - - def write_to( self, output_path ): - output_dir = os.path.dirname( output_path ) - if output_dir and not os.path.isdir( output_dir ): - os.makedirs( output_dir ) - f = open( output_path, 'wb' ) - f.write( self.get_value() ) - f.close() - -def amalgamate_source( source_top_dir=None, - target_source_path=None, - header_include_path=None ): - """Produces amalgated source. - Parameters: - source_top_dir: top-directory - target_source_path: output .cpp path - header_include_path: generated header path relative to target_source_path. - """ - print 'Amalgating header...' - header = AmalgamationFile( source_top_dir ) - header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' 
) - header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) - header.add_file( 'LICENSE', wrap_in_comment=True ) - header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' ) - header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) - header.add_text( '/// If defined, indicates that the source file is amalgated' ) - header.add_text( '/// to prevent private header inclusion.' ) - header.add_text( '#define JSON_IS_AMALGATED' ) - header.add_file( 'include/json/config.h' ) - header.add_file( 'include/json/forwards.h' ) - header.add_file( 'include/json/features.h' ) - header.add_file( 'include/json/value.h' ) - header.add_file( 'include/json/reader.h' ) - header.add_file( 'include/json/writer.h' ) - header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) - - target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) - print 'Writing amalgated header to %r' % target_header_path - header.write_to( target_header_path ) - - base, ext = os.path.splitext( header_include_path ) - forward_header_include_path = base + '-forwards' + ext - print 'Amalgating forward header...' - header = AmalgamationFile( source_top_dir ) - header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) - header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) - header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) - header.add_file( 'LICENSE', wrap_in_comment=True ) - header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) - header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) - header.add_text( '/// If defined, indicates that the source file is amalgated' ) - header.add_text( '/// to prevent private header inclusion.' ) - header.add_text( '#define JSON_IS_AMALGATED' ) - header.add_file( 'include/json/config.h' ) - header.add_file( 'include/json/forwards.h' ) - header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) - - target_forward_header_path = os.path.join( os.path.dirname(target_source_path), - forward_header_include_path ) - print 'Writing amalgated forward header to %r' % target_forward_header_path - header.write_to( target_forward_header_path ) - - print 'Amalgating source...' - source = AmalgamationFile( source_top_dir ) - source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' ) - source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) - source.add_file( 'LICENSE', wrap_in_comment=True ) - source.add_text( '' ) - source.add_text( '#include <%s>' % header_include_path ) - source.add_text( '' ) - source.add_file( 'src/lib_json\json_tool.h' ) - source.add_file( 'src/lib_json\json_reader.cpp' ) - source.add_file( 'src/lib_json\json_batchallocator.h' ) - source.add_file( 'src/lib_json\json_valueiterator.inl' ) - source.add_file( 'src/lib_json\json_value.cpp' ) - source.add_file( 'src/lib_json\json_writer.cpp' ) - - print 'Writing amalgated source to %r' % target_source_path - source.write_to( target_source_path ) - -def main(): - usage = """%prog [options] -Generate a single amalgated source and header file from the sources. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp', - help="""Output .cpp source path. 
[Default: %default]""") - parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h', - help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") - parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(), - help="""Source top-directory. [Default: %default]""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - msg = amalgamate_source( source_top_dir=options.top_dir, - target_source_path=options.target_source_path, - header_include_path=options.header_include_path ) - if msg: - sys.stderr.write( msg + '\n' ) - sys.exit( 1 ) - else: - print 'Source succesfully amalagated' - -if __name__ == '__main__': - main() +"""Amalgate json-cpp library sources into a single source and header file. + +Requires Python 2.6 + +Example of invocation (must be invoked from json-cpp top directory): +python amalgate.py +""" +import os +import os.path +import sys + +class AmalgamationFile: + def __init__( self, top_dir ): + self.top_dir = top_dir + self.blocks = [] + + def add_text( self, text ): + if not text.endswith( '\n' ): + text += '\n' + self.blocks.append( text ) + + def add_file( self, relative_input_path, wrap_in_comment=False ): + def add_marker( prefix ): + self.add_text( '' ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) ) + self.add_text( '// ' + '/'*70 ) + self.add_text( '' ) + add_marker( 'Beginning' ) + f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) + content = f.read() + if wrap_in_comment: + content = '/*\n' + content + '\n*/' + self.add_text( content ) + f.close() + add_marker( 'End' ) + self.add_text( '\n\n\n\n' ) + + def get_value( self ): + return ''.join( self.blocks ).replace('\r\n','\n') + + def write_to( self, output_path ): + output_dir = os.path.dirname( output_path ) + if output_dir and not os.path.isdir( output_dir ): + os.makedirs( output_dir ) + f = open( output_path, 'wb' ) + f.write( self.get_value() ) + f.close() + +def amalgamate_source( source_top_dir=None, + target_source_path=None, + header_include_path=None ): + """Produces amalgated source. + Parameters: + source_top_dir: top-directory + target_source_path: output .cpp path + header_include_path: generated header path relative to target_source_path. + """ + print 'Amalgating header...' + header = AmalgamationFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' 
) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_file( 'include/json/features.h' ) + header.add_file( 'include/json/value.h' ) + header.add_file( 'include/json/reader.h' ) + header.add_file( 'include/json/writer.h' ) + header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) + + target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) + print 'Writing amalgated header to %r' % target_header_path + header.write_to( target_header_path ) + + base, ext = os.path.splitext( header_include_path ) + forward_header_include_path = base + '-forwards' + ext + print 'Amalgating forward header...' + header = AmalgamationFile( source_top_dir ) + header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' ) + header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path ) + header.add_text( '/// This header provides forward declaration for all JsonCpp types.' ) + header.add_file( 'LICENSE', wrap_in_comment=True ) + header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) + header.add_text( '/// If defined, indicates that the source file is amalgated' ) + header.add_text( '/// to prevent private header inclusion.' ) + header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_file( 'include/json/config.h' ) + header.add_file( 'include/json/forwards.h' ) + header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) + + target_forward_header_path = os.path.join( os.path.dirname(target_source_path), + forward_header_include_path ) + print 'Writing amalgated forward header to %r' % target_forward_header_path + header.write_to( target_forward_header_path ) + + print 'Amalgating source...' + source = AmalgamationFile( source_top_dir ) + source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' ) + source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path ) + source.add_file( 'LICENSE', wrap_in_comment=True ) + source.add_text( '' ) + source.add_text( '#include <%s>' % header_include_path ) + source.add_text( '' ) + source.add_file( 'src/lib_json\json_tool.h' ) + source.add_file( 'src/lib_json\json_reader.cpp' ) + source.add_file( 'src/lib_json\json_batchallocator.h' ) + source.add_file( 'src/lib_json\json_valueiterator.inl' ) + source.add_file( 'src/lib_json\json_value.cpp' ) + source.add_file( 'src/lib_json\json_writer.cpp' ) + + print 'Writing amalgated source to %r' % target_source_path + source.write_to( target_source_path ) + +def main(): + usage = """%prog [options] +Generate a single amalgated source and header file from the sources. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp', + help="""Output .cpp source path. [Default: %default]""") + parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h', + help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""") + parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(), + help="""Source top-directory. 
[Default: %default]""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + msg = amalgamate_source( source_top_dir=options.top_dir, + target_source_path=options.target_source_path, + header_include_path=options.header_include_path ) + if msg: + sys.stderr.write( msg + '\n' ) + sys.exit( 1 ) + else: + print 'Source succesfully amalagated' + +if __name__ == '__main__': + main() diff --git a/trunk/jsoncpp/devtools/antglob.py b/trunk/jsoncpp/devtools/antglob.py index bbb6fec..30837b5 100644 --- a/trunk/jsoncpp/devtools/antglob.py +++ b/trunk/jsoncpp/devtools/antglob.py @@ -55,20 +55,20 @@ _ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' ) -def ant_pattern_to_re( ant_pattern ): - """Generates a regular expression from the ant pattern. - Matching convention: - **/a: match 'a', 'dir/a', 'dir1/dir2/a' - a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' - *.py: match 'script.py' but not 'a/script.py' +def ant_pattern_to_re( ant_pattern ): + """Generates a regular expression from the ant pattern. + Matching convention: + **/a: match 'a', 'dir/a', 'dir1/dir2/a' + a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' + *.py: match 'script.py' but not 'a/script.py' """ rex = ['^'] next_pos = 0 - sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) -## print 'Converting', ant_pattern - for match in _ANT_RE.finditer( ant_pattern ): -## print 'Matched', match.group() -## print match.start(0), next_pos + sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) +## print 'Converting', ant_pattern + for match in _ANT_RE.finditer( ant_pattern ): +## print 'Matched', match.group() +## print match.start(0), next_pos if match.start(0) != next_pos: raise ValueError( "Invalid ant pattern" ) if match.group(1): # /**/ @@ -83,14 +83,14 @@ def ant_pattern_to_re( ant_pattern ): rex.append( sep_rex ) else: # somepath rex.append( re.escape(match.group(6)) ) - next_pos = match.end() + next_pos = match.end() rex.append('$') return re.compile( ''.join( rex ) ) - -def _as_list( l ): - if isinstance(l, basestring): - return l.split() - return l + +def _as_list( l ): + if isinstance(l, basestring): + return l.split() + return l def glob(dir_path, includes = '**/*', @@ -99,8 +99,8 @@ def glob(dir_path, prune_dirs = prune_dirs, max_depth = 25): include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] - exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] - prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] + exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] + prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] dir_path = dir_path.replace('/',os.path.sep) entry_type_filter = entry_type @@ -117,37 +117,37 @@ def apply_filter( full_path, filter_rexs ): return True return False - def glob_impl( root_dir_path ): - child_dirs = [root_dir_path] - while child_dirs: + def glob_impl( root_dir_path ): + child_dirs = [root_dir_path] + while child_dirs: dir_path = child_dirs.pop() - for entry in listdir( dir_path ): - full_path = os.path.join( dir_path, entry ) -## print 'Testing:', full_path, - is_dir = os.path.isdir( full_path ) - if is_dir and not is_pruned_dir( entry ): # explore child directory ? -## print '===> marked for recursion', - child_dirs.append( full_path ) - included = apply_filter( full_path, include_filter ) - rejected = apply_filter( full_path, exclude_filter ) - if not included or rejected: # do not include entry ? 
-## print '=> not included or rejected' - continue - link = os.path.islink( full_path ) - is_file = os.path.isfile( full_path ) - if not is_file and not is_dir: -## print '=> unknown entry type' - continue - if link: - entry_type = is_file and FILE_LINK or DIR_LINK - else: - entry_type = is_file and FILE or DIR -## print '=> type: %d' % entry_type, - if (entry_type & entry_type_filter) != 0: -## print ' => KEEP' - yield os.path.join( dir_path, entry ) -## else: -## print ' => TYPE REJECTED' + for entry in listdir( dir_path ): + full_path = os.path.join( dir_path, entry ) +## print 'Testing:', full_path, + is_dir = os.path.isdir( full_path ) + if is_dir and not is_pruned_dir( entry ): # explore child directory ? +## print '===> marked for recursion', + child_dirs.append( full_path ) + included = apply_filter( full_path, include_filter ) + rejected = apply_filter( full_path, exclude_filter ) + if not included or rejected: # do not include entry ? +## print '=> not included or rejected' + continue + link = os.path.islink( full_path ) + is_file = os.path.isfile( full_path ) + if not is_file and not is_dir: +## print '=> unknown entry type' + continue + if link: + entry_type = is_file and FILE_LINK or DIR_LINK + else: + entry_type = is_file and FILE or DIR +## print '=> type: %d' % entry_type, + if (entry_type & entry_type_filter) != 0: +## print ' => KEEP' + yield os.path.join( dir_path, entry ) +## else: +## print ' => TYPE REJECTED' return list( glob_impl( dir_path ) ) @@ -155,47 +155,47 @@ def glob_impl( root_dir_path ): import unittest class AntPatternToRETest(unittest.TestCase): -## def test_conversion( self ): -## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) - - def test_matching( self ): - test_cases = [ ( 'path', - ['path'], - ['somepath', 'pathsuffix', '/path', '/path'] ), - ( '*.py', - ['source.py', 'source.ext.py', '.py'], - ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), - ( '**/path', - ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], - ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), - ( 'path/**', - ['path/a', 'path/path/a', 'path//'], - ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), - ( '/**/path', - ['/path', '/a/path', '/a/b/path/path', '/path/path'], - ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), - ( 'a/b', - ['a/b'], - ['somea/b', 'a/bsuffix', 'a/b/c'] ), - ( '**/*.py', - ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], - ['script.pyc', 'script.pyo', 'a.py/b'] ), - ( 'src/**/*.py', - ['src/a.py', 'src/dir/a.py'], - ['a/src/a.py', '/src/a.py'] ), - ] - for ant_pattern, accepted_matches, rejected_matches in list(test_cases): - def local_path( paths ): - return [ p.replace('/',os.path.sep) for p in paths ] - test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) - for ant_pattern, accepted_matches, rejected_matches in test_cases: - rex = ant_pattern_to_re( ant_pattern ) - print 'ant_pattern:', ant_pattern, ' => ', rex.pattern - for accepted_match in accepted_matches: - print 'Accepted?:', accepted_match - self.assert_( rex.match( accepted_match ) is not None ) - for rejected_match in rejected_matches: - print 'Rejected?:', rejected_match - self.assert_( rex.match( rejected_match ) is None ) +## def test_conversion( self ): +## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern ) + + def test_matching( self ): + test_cases = [ ( 'path', + ['path'], + ['somepath', 
'pathsuffix', '/path', '/path'] ), + ( '*.py', + ['source.py', 'source.ext.py', '.py'], + ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ), + ( '**/path', + ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], + ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ), + ( 'path/**', + ['path/a', 'path/path/a', 'path//'], + ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ), + ( '/**/path', + ['/path', '/a/path', '/a/b/path/path', '/path/path'], + ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ), + ( 'a/b', + ['a/b'], + ['somea/b', 'a/bsuffix', 'a/b/c'] ), + ( '**/*.py', + ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], + ['script.pyc', 'script.pyo', 'a.py/b'] ), + ( 'src/**/*.py', + ['src/a.py', 'src/dir/a.py'], + ['a/src/a.py', '/src/a.py'] ), + ] + for ant_pattern, accepted_matches, rejected_matches in list(test_cases): + def local_path( paths ): + return [ p.replace('/',os.path.sep) for p in paths ] + test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) ) + for ant_pattern, accepted_matches, rejected_matches in test_cases: + rex = ant_pattern_to_re( ant_pattern ) + print 'ant_pattern:', ant_pattern, ' => ', rex.pattern + for accepted_match in accepted_matches: + print 'Accepted?:', accepted_match + self.assert_( rex.match( accepted_match ) is not None ) + for rejected_match in rejected_matches: + print 'Rejected?:', rejected_match + self.assert_( rex.match( rejected_match ) is None ) unittest.main() diff --git a/trunk/jsoncpp/devtools/fixeol.py b/trunk/jsoncpp/devtools/fixeol.py index 5d8372d..4fed6ce 100644 --- a/trunk/jsoncpp/devtools/fixeol.py +++ b/trunk/jsoncpp/devtools/fixeol.py @@ -1,63 +1,63 @@ -import os.path - -def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True -## -## -## -##def _do_fix( is_dry_run = True ): -## from waftools import antglob -## python_sources = antglob.glob( '.', -## includes = '**/*.py **/wscript **/wscript_build', -## excludes = antglob.default_excludes + './waf.py', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in python_sources: -## _fix_python_source( path, is_dry_run ) -## -## cpp_sources = antglob.glob( '.', -## includes = '**/*.cpp **/*.h **/*.inl', -## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) -## for path in cpp_sources: -## _fix_source_eol( path, is_dry_run ) -## -## -##def dry_fix(context): -## _do_fix( is_dry_run = True ) -## -##def fix(context): -## _do_fix( is_dry_run = False ) -## -##def shutdown(): -## pass -## -##def check(context): -## # Unit tests are run when "check" target is used -## ut = UnitTest.unit_test() -## ut.change_to_testfile_dir = True -## ut.want_to_see_test_output = True -## ut.want_to_see_test_error = True -## ut.run() -## ut.print_results() +import os.path + +def fix_source_eol( 
path, is_dry_run = True, verbose = True, eol = '\n' ): + """Makes sure that all sources have the specified eol sequence (default: unix).""" + if not os.path.isfile( path ): + raise ValueError( 'Path "%s" is not a file' % path ) + try: + f = open(path, 'rb') + except IOError, msg: + print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) + return False + try: + raw_lines = f.readlines() + finally: + f.close() + fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] + if raw_lines != fixed_lines: + print '%s =>' % path, + if not is_dry_run: + f = open(path, "wb") + try: + f.writelines(fixed_lines) + finally: + f.close() + if verbose: + print is_dry_run and ' NEED FIX' or ' FIXED' + return True +## +## +## +##def _do_fix( is_dry_run = True ): +## from waftools import antglob +## python_sources = antglob.glob( '.', +## includes = '**/*.py **/wscript **/wscript_build', +## excludes = antglob.default_excludes + './waf.py', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in python_sources: +## _fix_python_source( path, is_dry_run ) +## +## cpp_sources = antglob.glob( '.', +## includes = '**/*.cpp **/*.h **/*.inl', +## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) +## for path in cpp_sources: +## _fix_source_eol( path, is_dry_run ) +## +## +##def dry_fix(context): +## _do_fix( is_dry_run = True ) +## +##def fix(context): +## _do_fix( is_dry_run = False ) +## +##def shutdown(): +## pass +## +##def check(context): +## # Unit tests are run when "check" target is used +## ut = UnitTest.unit_test() +## ut.change_to_testfile_dir = True +## ut.want_to_see_test_output = True +## ut.want_to_see_test_error = True +## ut.run() +## ut.print_results() diff --git a/trunk/jsoncpp/devtools/licenseupdater.py b/trunk/jsoncpp/devtools/licenseupdater.py index 03e0467..866eada 100644 --- a/trunk/jsoncpp/devtools/licenseupdater.py +++ b/trunk/jsoncpp/devtools/licenseupdater.py @@ -1,93 +1,93 @@ -"""Updates the license text in source file. -""" - -# An existing license is found if the file starts with the string below, -# and ends with the first blank line. -LICENSE_BEGIN = "// Copyright " - -BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur -// Distributed under MIT license, or public domain if desired and -// recognized in your jurisdiction. -// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE - -""".replace('\r\n','\n') - -def update_license( path, dry_run, show_diff ): - """Update the license statement in the specified file. - Parameters: - path: path of the C++ source file to update. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. 
- """ - with open( path, 'rt' ) as fin: - original_text = fin.read().replace('\r\n','\n') - newline = fin.newlines and fin.newlines[0] or '\n' - if not original_text.startswith( LICENSE_BEGIN ): - # No existing license found => prepend it - new_text = BRIEF_LICENSE + original_text - else: - license_end_index = original_text.index( '\n\n' ) # search first blank line - new_text = BRIEF_LICENSE + original_text[license_end_index+2:] - if original_text != new_text: - if not dry_run: - with open( path, 'wb' ) as fout: - fout.write( new_text.replace('\n', newline ) ) - print 'Updated', path - if show_diff: - import difflib - print '\n'.join( difflib.unified_diff( original_text.split('\n'), - new_text.split('\n') ) ) - return True - return False - -def update_license_in_source_directories( source_dirs, dry_run, show_diff ): - """Updates license text in C++ source files found in directory source_dirs. - Parameters: - source_dirs: list of directory to scan for C++ sources. Directories are - scanned recursively. - dry_run: if True, just print the path of the file that would be updated, - but don't change it. - show_diff: if True, print the path of the file that would be modified, - as well as the change made to the file. - """ - from devtools import antglob - prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' - for source_dir in source_dirs: - cpp_sources = antglob.glob( source_dir, - includes = '''**/*.h **/*.cpp **/*.inl''', - prune_dirs = prune_dirs ) - for source in cpp_sources: - update_license( source, dry_run, show_diff ) - -def main(): - usage = """%prog DIR [DIR2...] -Updates license text in sources of the project in source files found -in the directory specified on the command-line. - -Example of call: -python devtools\licenseupdater.py include src -n --diff -=> Show change that would be made to the sources. - -python devtools\licenseupdater.py include src -=> Update license statement on all sources in directories include/ and src/. -""" - from optparse import OptionParser - parser = OptionParser(usage=usage) - parser.allow_interspersed_args = False - parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, - help="""Only show what files are updated, do not update the files""") - parser.add_option('--diff', dest="show_diff", action='store_true', default=False, - help="""On update, show change made to the file.""") - parser.enable_interspersed_args() - options, args = parser.parse_args() - update_license_in_source_directories( args, options.dry_run, options.show_diff ) - print 'Done' - -if __name__ == '__main__': - import sys - import os.path - sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - main() - +"""Updates the license text in source file. +""" + +# An existing license is found if the file starts with the string below, +# and ends with the first blank line. +LICENSE_BEGIN = "// Copyright " + +BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +""".replace('\r\n','\n') + +def update_license( path, dry_run, show_diff ): + """Update the license statement in the specified file. + Parameters: + path: path of the C++ source file to update. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. 
+ show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + with open( path, 'rt' ) as fin: + original_text = fin.read().replace('\r\n','\n') + newline = fin.newlines and fin.newlines[0] or '\n' + if not original_text.startswith( LICENSE_BEGIN ): + # No existing license found => prepend it + new_text = BRIEF_LICENSE + original_text + else: + license_end_index = original_text.index( '\n\n' ) # search first blank line + new_text = BRIEF_LICENSE + original_text[license_end_index+2:] + if original_text != new_text: + if not dry_run: + with open( path, 'wb' ) as fout: + fout.write( new_text.replace('\n', newline ) ) + print 'Updated', path + if show_diff: + import difflib + print '\n'.join( difflib.unified_diff( original_text.split('\n'), + new_text.split('\n') ) ) + return True + return False + +def update_license_in_source_directories( source_dirs, dry_run, show_diff ): + """Updates license text in C++ source files found in directory source_dirs. + Parameters: + source_dirs: list of directory to scan for C++ sources. Directories are + scanned recursively. + dry_run: if True, just print the path of the file that would be updated, + but don't change it. + show_diff: if True, print the path of the file that would be modified, + as well as the change made to the file. + """ + from devtools import antglob + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + for source_dir in source_dirs: + cpp_sources = antglob.glob( source_dir, + includes = '''**/*.h **/*.cpp **/*.inl''', + prune_dirs = prune_dirs ) + for source in cpp_sources: + update_license( source, dry_run, show_diff ) + +def main(): + usage = """%prog DIR [DIR2...] +Updates license text in sources of the project in source files found +in the directory specified on the command-line. + +Example of call: +python devtools\licenseupdater.py include src -n --diff +=> Show change that would be made to the sources. + +python devtools\licenseupdater.py include src +=> Update license statement on all sources in directories include/ and src/. +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = False + parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False, + help="""Only show what files are updated, do not update the files""") + parser.add_option('--diff', dest="show_diff", action='store_true', default=False, + help="""On update, show change made to the file.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + update_license_in_source_directories( args, options.dry_run, options.show_diff ) + print 'Done' + +if __name__ == '__main__': + import sys + import os.path + sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + main() + diff --git a/trunk/jsoncpp/devtools/tarball.py b/trunk/jsoncpp/devtools/tarball.py index 182602e..ccbda39 100644 --- a/trunk/jsoncpp/devtools/tarball.py +++ b/trunk/jsoncpp/devtools/tarball.py @@ -1,53 +1,53 @@ -import os.path -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. 
- prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. - """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) - try: - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - finally: - tar.close() - -def decompress( tarball_path, base_dir ): - """Decompress the gzipped tarball into directory base_dir. - """ - # !!! This class method is not documented in the online doc - # nor is bz2open! - tar = tarfile.TarFile.gzopen(tarball_path, mode='r') - try: - tar.extractall( base_dir ) - finally: - tar.close() +import os.path +import gzip +import tarfile + +TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 + +def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): + """Parameters: + tarball_path: output path of the .tar.gz file + sources: list of sources to include in the tarball, relative to the current directory + base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped + from path in the tarball. + prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression ) + try: + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + finally: + tar.close() + +def decompress( tarball_path, base_dir ): + """Decompress the gzipped tarball into directory base_dir. + """ + # !!! This class method is not documented in the online doc + # nor is bz2open! 
+ tar = tarfile.TarFile.gzopen(tarball_path, mode='r') + try: + tar.extractall( base_dir ) + finally: + tar.close() diff --git a/trunk/jsoncpp/scons-tools/globtool.py b/trunk/jsoncpp/scons-tools/globtool.py index 8ee3cbb..811140e 100644 --- a/trunk/jsoncpp/scons-tools/globtool.py +++ b/trunk/jsoncpp/scons-tools/globtool.py @@ -1,53 +1,53 @@ -import fnmatch -import os - -def generate( env ): - def Glob( env, includes = None, excludes = None, dir = '.' ): - """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') - helper function to environment. - - Glob both the file-system files. - - includes: list of file name pattern included in the return list when matched. - excludes: list of file name pattern exluced from the return list. - - Example: - sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) - """ - def filterFilename(path): - abs_path = os.path.join( dir, path ) - if not os.path.isfile(abs_path): - return 0 - fn = os.path.basename(path) - match = 0 - for include in includes: - if fnmatch.fnmatchcase( fn, include ): - match = 1 - break - if match == 1 and not excludes is None: - for exclude in excludes: - if fnmatch.fnmatchcase( fn, exclude ): - match = 0 - break - return match - if includes is None: - includes = ('*',) - elif type(includes) in ( type(''), type(u'') ): - includes = (includes,) - if type(excludes) in ( type(''), type(u'') ): - excludes = (excludes,) - dir = env.Dir(dir).abspath - paths = os.listdir( dir ) - def makeAbsFileNode( path ): - return env.File( os.path.join( dir, path ) ) - nodes = filter( filterFilename, paths ) - return map( makeAbsFileNode, nodes ) - - from SCons.Script import Environment +import fnmatch +import os + +def generate( env ): + def Glob( env, includes = None, excludes = None, dir = '.' ): + """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') + helper function to environment. + + Glob both the file-system files. + + includes: list of file name pattern included in the return list when matched. + excludes: list of file name pattern exluced from the return list. + + Example: + sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) + """ + def filterFilename(path): + abs_path = os.path.join( dir, path ) + if not os.path.isfile(abs_path): + return 0 + fn = os.path.basename(path) + match = 0 + for include in includes: + if fnmatch.fnmatchcase( fn, include ): + match = 1 + break + if match == 1 and not excludes is None: + for exclude in excludes: + if fnmatch.fnmatchcase( fn, exclude ): + match = 0 + break + return match + if includes is None: + includes = ('*',) + elif type(includes) in ( type(''), type(u'') ): + includes = (includes,) + if type(excludes) in ( type(''), type(u'') ): + excludes = (excludes,) + dir = env.Dir(dir).abspath + paths = os.listdir( dir ) + def makeAbsFileNode( path ): + return env.File( os.path.join( dir, path ) ) + nodes = filter( filterFilename, paths ) + return map( makeAbsFileNode, nodes ) + + from SCons.Script import Environment Environment.Glob = Glob - -def exists(env): - """ - Tool always exists. - """ - return True + +def exists(env): + """ + Tool always exists. + """ + return True diff --git a/trunk/jsoncpp/test/jsonchecker/readme.txt b/trunk/jsoncpp/test/jsonchecker/readme.txt index 0efc2a4..321d89d 100644 --- a/trunk/jsoncpp/test/jsonchecker/readme.txt +++ b/trunk/jsoncpp/test/jsonchecker/readme.txt @@ -1,3 +1,3 @@ -Test suite from http://json.org/JSON_checker/. 
- -If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/trunk/jsoncpp/test/rununittests.py b/trunk/jsoncpp/test/rununittests.py index ccc54e4..366184c 100644 --- a/trunk/jsoncpp/test/rununittests.py +++ b/trunk/jsoncpp/test/rununittests.py @@ -1,73 +1,73 @@ -import sys -import os -import os.path -import subprocess -from glob import glob -import optparse - -VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' - -class TestProxy(object): - def __init__( self, test_exe_path, use_valgrind=False ): - self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) - self.use_valgrind = use_valgrind - - def run( self, options ): - if self.use_valgrind: - cmd = VALGRIND_CMD.split() - else: - cmd = [] - cmd.extend( [self.test_exe_path, '--test-auto'] + options ) - process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) - stdout = process.communicate()[0] - if process.returncode: - return False, stdout - return True, stdout - -def runAllTests( exe_path, use_valgrind=False ): - test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) - status, test_names = test_proxy.run( ['--list-tests'] ) - if not status: - print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names - return 1 - test_names = [name.strip() for name in test_names.strip().split('\n')] - failures = [] - for name in test_names: - print 'TESTING %s:' % name, - succeed, result = test_proxy.run( ['--test', name] ) - if succeed: - print 'OK' - else: - failures.append( (name, result) ) - print 'FAILED' - failed_count = len(failures) - pass_count = len(test_names) - failed_count - if failed_count: - print - for name, result in failures: - print result - print '%d/%d tests passed (%d failure(s))' % ( - pass_count, len(test_names), failed_count) - return 1 - else: - print 'All %d tests passed' % len(test_names) - return 0 - -def main(): - from optparse import OptionParser - parser = OptionParser( usage="%prog [options] " ) - parser.add_option("--valgrind", - action="store_true", dest="valgrind", default=False, - help="run all the tests using valgrind to detect memory leaks") - parser.enable_interspersed_args() - options, args = parser.parse_args() - - if len(args) != 1: - parser.error( 'Must provides at least path to test_lib_json executable.' 
) - sys.exit( 1 ) - - exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) - sys.exit( exit_code ) - -if __name__ == '__main__': - main() +import sys +import os +import os.path +import subprocess +from glob import glob +import optparse + +VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' + +class TestProxy(object): + def __init__( self, test_exe_path, use_valgrind=False ): + self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) + self.use_valgrind = use_valgrind + + def run( self, options ): + if self.use_valgrind: + cmd = VALGRIND_CMD.split() + else: + cmd = [] + cmd.extend( [self.test_exe_path, '--test-auto'] + options ) + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) + stdout = process.communicate()[0] + if process.returncode: + return False, stdout + return True, stdout + +def runAllTests( exe_path, use_valgrind=False ): + test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) + status, test_names = test_proxy.run( ['--list-tests'] ) + if not status: + print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names + return 1 + test_names = [name.strip() for name in test_names.strip().split('\n')] + failures = [] + for name in test_names: + print 'TESTING %s:' % name, + succeed, result = test_proxy.run( ['--test', name] ) + if succeed: + print 'OK' + else: + failures.append( (name, result) ) + print 'FAILED' + failed_count = len(failures) + pass_count = len(test_names) - failed_count + if failed_count: + print + for name, result in failures: + print result + print '%d/%d tests passed (%d failure(s))' % ( + pass_count, len(test_names), failed_count) + return 1 + else: + print 'All %d tests passed' % len(test_names) + return 0 + +def main(): + from optparse import OptionParser + parser = OptionParser( usage="%prog [options] " ) + parser.add_option("--valgrind", + action="store_true", dest="valgrind", default=False, + help="run all the tests using valgrind to detect memory leaks") + parser.enable_interspersed_args() + options, args = parser.parse_args() + + if len(args) != 1: + parser.error( 'Must provides at least path to test_lib_json executable.' ) + sys.exit( 1 ) + + exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) + sys.exit( exit_code ) + +if __name__ == '__main__': + main() From 7f5695e5b3c49a9181d79986e50251cf76f6d1a6 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Tue, 21 Jun 2011 21:53:02 +0000 Subject: [PATCH 238/268] More eol changes. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@238 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/test/data/fail_test_array_01.json | 2 +- trunk/jsoncpp/test/data/test_comment_01.expected | 16 ++++++++-------- trunk/jsoncpp/test/data/test_comment_01.json | 16 ++++++++-------- .../test/data/test_integer_06_64bits.expected | 2 +- .../test/data/test_integer_06_64bits.json | 4 ++-- .../test/data/test_integer_07_64bits.expected | 2 +- .../test/data/test_integer_07_64bits.json | 4 ++-- .../test/data/test_integer_08_64bits.expected | 2 +- .../test/data/test_integer_08_64bits.json | 4 ++-- trunk/jsoncpp/test/data/test_string_01.json | 2 +- trunk/jsoncpp/test/data/test_string_02.json | 2 +- trunk/jsoncpp/test/data/test_string_03.json | 2 +- .../test/data/test_string_unicode_01.expected | 2 +- .../test/data/test_string_unicode_02.expected | 2 +- .../test/data/test_string_unicode_03.expected | 2 +- .../test/data/test_string_unicode_04.expected | 2 +- .../test/data/test_string_unicode_05.expected | 4 ++-- 17 files changed, 35 insertions(+), 35 deletions(-) diff --git a/trunk/jsoncpp/test/data/fail_test_array_01.json b/trunk/jsoncpp/test/data/fail_test_array_01.json index 900fcc2..f72a6d0 100644 --- a/trunk/jsoncpp/test/data/fail_test_array_01.json +++ b/trunk/jsoncpp/test/data/fail_test_array_01.json @@ -1 +1 @@ -[ 1 2 3] +[ 1 2 3] diff --git a/trunk/jsoncpp/test/data/test_comment_01.expected b/trunk/jsoncpp/test/data/test_comment_01.expected index 0b8f42d..2a7f00c 100644 --- a/trunk/jsoncpp/test/data/test_comment_01.expected +++ b/trunk/jsoncpp/test/data/test_comment_01.expected @@ -1,8 +1,8 @@ -.={} -.test=[] -.test[0]={} -.test[0].a="aaa" -.test[1]={} -.test[1].b="bbb" -.test[2]={} -.test[2].c="ccc" +.={} +.test=[] +.test[0]={} +.test[0].a="aaa" +.test[1]={} +.test[1].b="bbb" +.test[2]={} +.test[2].c="ccc" diff --git a/trunk/jsoncpp/test/data/test_comment_01.json b/trunk/jsoncpp/test/data/test_comment_01.json index 0de8f9c..7363490 100644 --- a/trunk/jsoncpp/test/data/test_comment_01.json +++ b/trunk/jsoncpp/test/data/test_comment_01.json @@ -1,8 +1,8 @@ -{ - "test": - [ - { "a" : "aaa" }, // Comment for a - { "b" : "bbb" }, // Comment for b - { "c" : "ccc" } // Comment for c - ] -} +{ + "test": + [ + { "a" : "aaa" }, // Comment for a + { "b" : "bbb" }, // Comment for b + { "c" : "ccc" } // Comment for c + ] +} diff --git a/trunk/jsoncpp/test/data/test_integer_06_64bits.expected b/trunk/jsoncpp/test/data/test_integer_06_64bits.expected index bc9520a1..131b085 100644 --- a/trunk/jsoncpp/test/data/test_integer_06_64bits.expected +++ b/trunk/jsoncpp/test/data/test_integer_06_64bits.expected @@ -1 +1 @@ -.=9223372036854775808 +.=9223372036854775808 diff --git a/trunk/jsoncpp/test/data/test_integer_06_64bits.json b/trunk/jsoncpp/test/data/test_integer_06_64bits.json index 360d660..cfedfe5 100644 --- a/trunk/jsoncpp/test/data/test_integer_06_64bits.json +++ b/trunk/jsoncpp/test/data/test_integer_06_64bits.json @@ -1,2 +1,2 @@ -9223372036854775808 - +9223372036854775808 + diff --git a/trunk/jsoncpp/test/data/test_integer_07_64bits.expected b/trunk/jsoncpp/test/data/test_integer_07_64bits.expected index 39eb798..c8524a3 100644 --- a/trunk/jsoncpp/test/data/test_integer_07_64bits.expected +++ b/trunk/jsoncpp/test/data/test_integer_07_64bits.expected @@ -1 +1 @@ -.=-9223372036854775808 +.=-9223372036854775808 diff --git a/trunk/jsoncpp/test/data/test_integer_07_64bits.json b/trunk/jsoncpp/test/data/test_integer_07_64bits.json index 11d8513..a964ad2 100644 --- 
a/trunk/jsoncpp/test/data/test_integer_07_64bits.json +++ b/trunk/jsoncpp/test/data/test_integer_07_64bits.json @@ -1,2 +1,2 @@ --9223372036854775808 - +-9223372036854775808 + diff --git a/trunk/jsoncpp/test/data/test_integer_08_64bits.expected b/trunk/jsoncpp/test/data/test_integer_08_64bits.expected index 831f432..321bba5 100644 --- a/trunk/jsoncpp/test/data/test_integer_08_64bits.expected +++ b/trunk/jsoncpp/test/data/test_integer_08_64bits.expected @@ -1 +1 @@ -.=18446744073709551615 +.=18446744073709551615 diff --git a/trunk/jsoncpp/test/data/test_integer_08_64bits.json b/trunk/jsoncpp/test/data/test_integer_08_64bits.json index 6e1fb04..4c15a01 100644 --- a/trunk/jsoncpp/test/data/test_integer_08_64bits.json +++ b/trunk/jsoncpp/test/data/test_integer_08_64bits.json @@ -1,2 +1,2 @@ -18446744073709551615 - +18446744073709551615 + diff --git a/trunk/jsoncpp/test/data/test_string_01.json b/trunk/jsoncpp/test/data/test_string_01.json index c8c059b..6cd0db4 100644 --- a/trunk/jsoncpp/test/data/test_string_01.json +++ b/trunk/jsoncpp/test/data/test_string_01.json @@ -1 +1 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" +"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/trunk/jsoncpp/test/data/test_string_02.json b/trunk/jsoncpp/test/data/test_string_02.json index f0fe56a..9a7e5dc 100644 --- a/trunk/jsoncpp/test/data/test_string_02.json +++ b/trunk/jsoncpp/test/data/test_string_02.json @@ -1 +1 @@ -"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" 
+"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~" diff --git a/trunk/jsoncpp/test/data/test_string_03.json b/trunk/jsoncpp/test/data/test_string_03.json index f0a220f..2d38180 100644 --- a/trunk/jsoncpp/test/data/test_string_03.json +++ b/trunk/jsoncpp/test/data/test_string_03.json @@ -1 +1 @@ -"http:\/\/jsoncpp.sourceforge.net\/" +"http:\/\/jsoncpp.sourceforge.net\/" diff --git a/trunk/jsoncpp/test/data/test_string_unicode_01.expected b/trunk/jsoncpp/test/data/test_string_unicode_01.expected index 447f85a..1f3be7f 100644 --- a/trunk/jsoncpp/test/data/test_string_unicode_01.expected +++ b/trunk/jsoncpp/test/data/test_string_unicode_01.expected @@ -1 +1 @@ -.="a" +.="a" diff --git a/trunk/jsoncpp/test/data/test_string_unicode_02.expected b/trunk/jsoncpp/test/data/test_string_unicode_02.expected index c0b3b43..1388f53 100644 --- a/trunk/jsoncpp/test/data/test_string_unicode_02.expected +++ b/trunk/jsoncpp/test/data/test_string_unicode_02.expected @@ -1 +1 @@ -.="¢" +.="¢" diff --git a/trunk/jsoncpp/test/data/test_string_unicode_03.expected b/trunk/jsoncpp/test/data/test_string_unicode_03.expected index 7289743..9b80b27 100644 --- a/trunk/jsoncpp/test/data/test_string_unicode_03.expected +++ b/trunk/jsoncpp/test/data/test_string_unicode_03.expected @@ -1 +1 @@ -.="€" +.="€" diff --git a/trunk/jsoncpp/test/data/test_string_unicode_04.expected b/trunk/jsoncpp/test/data/test_string_unicode_04.expected index 868fbc3..b9e7fe3 100644 --- a/trunk/jsoncpp/test/data/test_string_unicode_04.expected +++ b/trunk/jsoncpp/test/data/test_string_unicode_04.expected @@ -1 +1 @@ -.="𝄞" +.="𝄞" diff --git a/trunk/jsoncpp/test/data/test_string_unicode_05.expected 
b/trunk/jsoncpp/test/data/test_string_unicode_05.expected index 19b2c40..c2e67f9 100644 --- a/trunk/jsoncpp/test/data/test_string_unicode_05.expected +++ b/trunk/jsoncpp/test/data/test_string_unicode_05.expected @@ -1,2 +1,2 @@ -.="Zażółć gęślą jaźń" - +.="Zażółć gęślą jaźń" + From bd9104f6ba6d03b5b0be3e73bb03ca5b232aaa73 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Tue, 21 Jun 2011 21:56:54 +0000 Subject: [PATCH 239/268] Simple changes to Reader initialization, from Chromium folks. (I do not think this was submitted as a bug.) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@239 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 24 +++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 238a3c5..e55745e 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -1,4 +1,4 @@ -// Copyright 2007-2010 Baptiste Lepilleur +// Copyright 2007-2011 Baptiste Lepilleur // Distributed under MIT license, or public domain if desired and // recognized in your jurisdiction. // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE @@ -80,13 +80,31 @@ containsNewLine( Reader::Location begin, // ////////////////////////////////////////////////////////////////// Reader::Reader() - : features_( Features::all() ) + : errors_(), + document_(), + begin_(), + end_(), + current_(), + lastValueEnd_(), + lastValue_(), + commentsBefore_(), + features_( Features::all() ), + collectComments_() { } Reader::Reader( const Features &features ) - : features_( features ) + : errors_(), + document_(), + begin_(), + end_(), + current_(), + lastValueEnd_(), + lastValue_(), + commentsBefore_(), + features_( features ), + collectComments_() { } From 8635c6cbc349346732cbb26bda2d9eab54a80c73 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Tue, 21 Jun 2011 22:08:49 +0000 Subject: [PATCH 240/268] Another simple addition for constructor initialization, PathArgument. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@240 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index b629987..fc809ce 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1,4 +1,4 @@ -// Copyright 2007-2010 Baptiste Lepilleur +// Copyright 2011 Baptiste Lepilleur // Distributed under MIT license, or public domain if desired and // recognized in your jurisdiction. 
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE @@ -270,7 +270,7 @@ Value::CZString::isStaticString() const */ Value::Value( ValueType type ) : type_( type ) - , allocated_( 0 ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -314,6 +314,7 @@ Value::Value( ValueType type ) Value::Value( UInt value ) : type_( uintValue ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -324,6 +325,7 @@ Value::Value( UInt value ) Value::Value( Int value ) : type_( intValue ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -336,6 +338,7 @@ Value::Value( Int value ) # if defined(JSON_HAS_INT64) Value::Value( Int64 value ) : type_( intValue ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -347,6 +350,7 @@ Value::Value( Int64 value ) Value::Value( UInt64 value ) : type_( uintValue ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -358,6 +362,7 @@ Value::Value( UInt64 value ) Value::Value( double value ) : type_( realValue ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -432,6 +437,7 @@ Value::Value( const CppTL::ConstString &value ) Value::Value( bool value ) : type_( booleanValue ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -443,6 +449,7 @@ Value::Value( bool value ) Value::Value( const Value &other ) : type_( other.type_ ) + , allocated_( false ) # ifdef JSON_VALUE_USE_INTERNAL_MAP , itemIsUsed_( 0 ) #endif @@ -1701,13 +1708,16 @@ Value::end() // ////////////////////////////////////////////////////////////////// PathArgument::PathArgument() - : kind_( kindNone ) + : key_() + , index_() + , kind_( kindNone ) { } PathArgument::PathArgument( ArrayIndex index ) - : index_( index ) + : key_() + , index_( index ) , kind_( kindIndex ) { } @@ -1715,6 +1725,7 @@ PathArgument::PathArgument( ArrayIndex index ) PathArgument::PathArgument( const char *key ) : key_( key ) + , index_() , kind_( kindKey ) { } @@ -1722,6 +1733,7 @@ PathArgument::PathArgument( const char *key ) PathArgument::PathArgument( const std::string &key ) : key_( key.c_str() ) + , index_() , kind_( kindKey ) { } From 808c4f43a4f37dd085a33ae9477d9c0dc9107be0 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Tue, 21 Jun 2011 23:02:06 +0000 Subject: [PATCH 241/268] More missing constructor initializers found by Coverity. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@241 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 1bda183..d307222 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -1,4 +1,4 @@ -// Copyright 2007-2010 Baptiste Lepilleur +// Copyright 2011 Baptiste Lepilleur // Distributed under MIT license, or public domain if desired and // recognized in your jurisdiction. 
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE @@ -278,6 +278,7 @@ FastWriter::writeValue( const Value &value ) StyledWriter::StyledWriter() : rightMargin_( 74 ) , indentSize_( 3 ) + , addChildValues_() { } @@ -554,6 +555,7 @@ StyledStreamWriter::StyledStreamWriter( std::string indentation ) : document_(NULL) , rightMargin_( 74 ) , indentation_( indentation ) + , addChildValues_() { } From 1ebb869b9a2b151af4b84a36da7e9dc2f3438a9f Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 22 Jun 2011 00:43:31 +0000 Subject: [PATCH 242/268] (bug#3314841) Fixed JSON_IS_AMALGAMATION. Using os.path for OSX filename compatibility. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@242 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/amalgamate.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/trunk/jsoncpp/amalgamate.py b/trunk/jsoncpp/amalgamate.py index 6e3b11a..eab724f 100644 --- a/trunk/jsoncpp/amalgamate.py +++ b/trunk/jsoncpp/amalgamate.py @@ -65,7 +65,7 @@ def amalgamate_source( source_top_dir=None, header.add_text( '# define JSON_AMALGATED_H_INCLUDED' ) header.add_text( '/// If defined, indicates that the source file is amalgated' ) header.add_text( '/// to prevent private header inclusion.' ) - header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_text( '#define JSON_IS_AMALGAMATION' ) header.add_file( 'include/json/config.h' ) header.add_file( 'include/json/forwards.h' ) header.add_file( 'include/json/features.h' ) @@ -90,7 +90,7 @@ def amalgamate_source( source_top_dir=None, header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' ) header.add_text( '/// If defined, indicates that the source file is amalgated' ) header.add_text( '/// to prevent private header inclusion.' ) - header.add_text( '#define JSON_IS_AMALGATED' ) + header.add_text( '#define JSON_IS_AMALGAMATION' ) header.add_file( 'include/json/config.h' ) header.add_file( 'include/json/forwards.h' ) header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' ) @@ -108,12 +108,13 @@ def amalgamate_source( source_top_dir=None, source.add_text( '' ) source.add_text( '#include <%s>' % header_include_path ) source.add_text( '' ) - source.add_file( 'src/lib_json\json_tool.h' ) - source.add_file( 'src/lib_json\json_reader.cpp' ) - source.add_file( 'src/lib_json\json_batchallocator.h' ) - source.add_file( 'src/lib_json\json_valueiterator.inl' ) - source.add_file( 'src/lib_json\json_value.cpp' ) - source.add_file( 'src/lib_json\json_writer.cpp' ) + lib_json = 'src/lib_json' + source.add_file( os.path.join(lib_json, 'json_tool.h') ) + source.add_file( os.path.join(lib_json, 'json_reader.cpp') ) + source.add_file( os.path.join(lib_json, 'json_batchallocator.h') ) + source.add_file( os.path.join(lib_json, 'json_valueiterator.inl') ) + source.add_file( os.path.join(lib_json, 'json_value.cpp') ) + source.add_file( os.path.join(lib_json, 'json_writer.cpp') ) print 'Writing amalgated source to %r' % target_source_path source.write_to( target_source_path ) From 6340a7805eae0b11c7cd863471637f947d6bdd92 Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 22 Jun 2011 08:30:21 +0000 Subject: [PATCH 243/268] bug#3306345: minor typo in Path::resolve() -- missing bang. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@243 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_value.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index fc809ce..3ba36fe 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1840,7 +1840,7 @@ Path::resolve( const Value &root ) const const PathArgument &arg = *it; if ( arg.kind_ == PathArgument::kindIndex ) { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + if ( !node->isArray() || !node->isValidIndex( arg.index_ ) ) { // Error: unable to resolve path (array value expected at position... } @@ -1873,7 +1873,7 @@ Path::resolve( const Value &root, const PathArgument &arg = *it; if ( arg.kind_ == PathArgument::kindIndex ) { - if ( !node->isArray() || node->isValidIndex( arg.index_ ) ) + if ( !node->isArray() || !node->isValidIndex( arg.index_ ) ) return defaultValue; node = &((*node)[arg.index_]); } From d638d6aa78a09ecc706ffe803d55c4f374ea8dff Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Wed, 22 Jun 2011 21:04:41 +0000 Subject: [PATCH 244/268] bug#2407932: strpbrk() could fail for NULL pointer. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@244 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_writer.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index d307222..47e768c 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -119,6 +119,8 @@ std::string valueToString( bool value ) std::string valueToQuotedString( const char *value ) { + if (value == NULL) + return ""; // Not sure how to handle unicode... if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value )) return std::string("\"") + value + "\""; From a20496a685eb4b7f630c6aebc7e7bef688eb1acb Mon Sep 17 00:00:00 2001 From: christopherdunn Date: Fri, 24 Jun 2011 21:15:30 +0000 Subject: [PATCH 245/268] Updated bug-fix list. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@245 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 86c28c0..8316ff6 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -122,10 +122,18 @@ New in SVN length of 32 characters. - Fixed Value::operator <= implementation (had the semantic of operator >=). - Found when addigin unit tests for comparison operators. + Found when adding unit tests for comparison operators. - Value::compare() is now const and has an actual implementation with unit tests. + + - Bug #2407932: strpbrk() can fail for NULL pointer. + + - Bug #3306345: Fixed minor typo in Path::resolve(). + + - Bug #3314841/#3306896: errors in amalgamate.py + + - Fixed some Coverity warnings and line-endings. * License From 6e4c0f7cf9de355256d961ab75e91bf50ac89a95 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Wed, 14 Sep 2011 08:41:37 +0000 Subject: [PATCH 246/268] Made JSON_USE_EXCEPTION's value in config.h a default that can be overridden. This allows users to override it with their compiler invocation. For example: g++ -D JSON_USE_EXCEPTION=0 ... 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@246 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/assertions.h | 4 ++-- trunk/jsoncpp/include/json/config.h | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/trunk/jsoncpp/include/json/assertions.h b/trunk/jsoncpp/include/json/assertions.h index 5a27d5f..84eb5d7 100644 --- a/trunk/jsoncpp/include/json/assertions.h +++ b/trunk/jsoncpp/include/json/assertions.h @@ -13,10 +13,10 @@ # include #endif // if !defined(JSON_IS_AMALGAMATION) -#if defined(JSON_USE_EXCEPTION) +#if JSON_USE_EXCEPTION #define JSON_ASSERT( condition ) assert( condition ); // @todo <= change this into an exception throw #define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); -#else // defined(JSON_USE_EXCEPTION) +#else // JSON_USE_EXCEPTION #define JSON_ASSERT( condition ) assert( condition ); #define JSON_FAIL_MESSAGE( message ) { std::cerr << std::endl << message << std::endl; exit(123); } #endif diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 7609d45..72437c4 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -24,9 +24,11 @@ /// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined. //# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1 -/// If defined, indicates that Json use exception to report invalid type manipulation -/// instead of C assert macro. +// If non-zero, the library uses exceptions to report bad input instead of C +// assertion macros. The default is to use exceptions. +# ifndef JSON_USE_EXCEPTION # define JSON_USE_EXCEPTION 1 +# endif /// If defined, indicates that the source file is amalgated /// to prevent private header inclusion. From 61c613e94dc37f10af2aa55f533793aa20d10e87 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Thu, 22 Dec 2011 03:18:24 +0000 Subject: [PATCH 247/268] Got rid of several unnecessary includes of . Including causes the file to be polluted with a static initializer for the __ioinit symbol. This can harm binary startup time. For more info, see here: http://neugierig.org/software/chromium/notes/2011/08/static-initializers.html git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@247 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/assertions.h | 9 +++++++-- trunk/jsoncpp/include/json/reader.h | 1 - trunk/jsoncpp/include/json/writer.h | 1 - trunk/jsoncpp/src/lib_json/json_reader.cpp | 10 ++++++++-- trunk/jsoncpp/src/lib_json/json_value.cpp | 1 - trunk/jsoncpp/src/lib_json/json_writer.cpp | 1 - 6 files changed, 15 insertions(+), 8 deletions(-) diff --git a/trunk/jsoncpp/include/json/assertions.h b/trunk/jsoncpp/include/json/assertions.h index 84eb5d7..2a22cc4 100644 --- a/trunk/jsoncpp/include/json/assertions.h +++ b/trunk/jsoncpp/include/json/assertions.h @@ -7,7 +7,6 @@ # define CPPTL_JSON_ASSERTIONS_H_INCLUDED #include -#include #if !defined(JSON_IS_AMALGAMATION) # include @@ -18,7 +17,13 @@ #define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message ); #else // JSON_USE_EXCEPTION #define JSON_ASSERT( condition ) assert( condition ); -#define JSON_FAIL_MESSAGE( message ) { std::cerr << std::endl << message << std::endl; exit(123); } + +// The call to assert() will show the failure message in debug builds. In +// release bugs we write to invalid memory in order to crash hard instead of +// calling exit(), so that a debugger or crash reporter gets the chance to take +// over. 
+#define JSON_FAIL_MESSAGE( message ) { assert(false && message); strcpy(reinterpret_cast(666), message); } + #endif #define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) { JSON_FAIL_MESSAGE( message ) } diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index 0a324df..a3023b3 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -13,7 +13,6 @@ # include # include # include -# include namespace Json { diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 4789363..38d41e1 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -11,7 +11,6 @@ #endif // if !defined(JSON_IS_AMALGAMATION) # include # include -# include namespace Json { diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index e55745e..1f3873a 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -13,7 +13,6 @@ #include #include #include -#include #include #if _MSC_VER >= 1400 // VC++ 8.0 @@ -904,7 +903,14 @@ std::istream& operator>>( std::istream &sin, Value &root ) { Json::Reader reader; bool ok = reader.parse(sin, root, true); - if (!ok) JSON_FAIL_MESSAGE(reader.getFormattedErrorMessages()); + if (!ok) { + fprintf( + stderr, + "Error from reader: %s", + reader.getFormattedErrorMessages().c_str()); + + JSON_FAIL_MESSAGE("reader error"); + } return sin; } diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 3ba36fe..91f312e 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -12,7 +12,6 @@ # endif // #ifndef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR #endif // if !defined(JSON_IS_AMALGAMATION) #include -#include #include #include #include diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index 47e768c..b44def3 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -11,7 +11,6 @@ #include #include #include -#include #include #include From cd27b51e076868756fe9b19994e8f81260b03368 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Sun, 8 Jan 2012 23:49:55 +0000 Subject: [PATCH 248/268] Added an exit() to JSON_FAIL_MESSAGE to fix "no return" errors. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@248 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/assertions.h | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/trunk/jsoncpp/include/json/assertions.h b/trunk/jsoncpp/include/json/assertions.h index 2a22cc4..a480585 100644 --- a/trunk/jsoncpp/include/json/assertions.h +++ b/trunk/jsoncpp/include/json/assertions.h @@ -19,10 +19,10 @@ #define JSON_ASSERT( condition ) assert( condition ); // The call to assert() will show the failure message in debug builds. In -// release bugs we write to invalid memory in order to crash hard instead of -// calling exit(), so that a debugger or crash reporter gets the chance to take -// over. -#define JSON_FAIL_MESSAGE( message ) { assert(false && message); strcpy(reinterpret_cast(666), message); } +// release bugs we write to invalid memory in order to crash hard, so that a +// debugger or crash reporter gets the chance to take over. We still call exit() +// afterward in order to tell the compiler that this macro doesn't return. 
+#define JSON_FAIL_MESSAGE( message ) { assert(false && message); strcpy(reinterpret_cast(666), message); exit(123); } #endif From f349a129c1bf9537e340675d72381d13d0115f39 Mon Sep 17 00:00:00 2001 From: aaronjacobs Date: Mon, 12 Mar 2012 04:53:57 +0000 Subject: [PATCH 249/268] Made it possible to drop null placeholders from array output. This can be used when it's clear that the consumer is able to deal with this, as web browsers are. Thanks to Yatin Chawathe for the patch. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@249 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/include/json/writer.h | 8 ++++++++ trunk/jsoncpp/src/lib_json/json_writer.cpp | 12 ++++++++++-- trunk/jsoncpp/src/test_lib_json/main.cpp | 18 ++++++++++++++++++ 3 files changed, 36 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 38d41e1..46d5ccc 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -40,6 +40,13 @@ namespace Json { void enableYAMLCompatibility(); + /** \brief Drop the "null" string from the writer's output for nullValues. + * Strictly speaking, this is not valid JSON. But when the output is being + * fed to a browser's Javascript, it makes for smaller output and the + * browser can handle the output just fine. + */ + void dropNullPlaceholders(); + public: // overridden from Writer virtual std::string write( const Value &root ); @@ -48,6 +55,7 @@ namespace Json { std::string document_; bool yamlCompatiblityEnabled_; + bool dropNullPlaceholders_; }; /** \brief Writes a Value in JSON format in a human friendly way. diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index b44def3..ccf323a 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -192,7 +192,8 @@ Writer::~Writer() // ////////////////////////////////////////////////////////////////// FastWriter::FastWriter() - : yamlCompatiblityEnabled_( false ) + : yamlCompatiblityEnabled_( false ), + dropNullPlaceholders_( false ) { } @@ -204,6 +205,13 @@ FastWriter::enableYAMLCompatibility() } +void +FastWriter::dropNullPlaceholders() +{ + dropNullPlaceholders_ = true; +} + + std::string FastWriter::write( const Value &root ) { @@ -220,7 +228,7 @@ FastWriter::writeValue( const Value &value ) switch ( value.type() ) { case nullValue: - document_ += "null"; + if (!dropNullPlaceholders_) document_ += "null"; break; case intValue: document_ += valueToString( value.asLargestInt() ); diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index c6ab619..3e28085 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -1399,6 +1399,23 @@ ValueTest::checkIsEqual( const Json::Value &x, const Json::Value &y ) JSONTEST_ASSERT( y.compare( x ) == 0 ); } + +struct WriterTest : JsonTest::TestCase +{ +}; + + +JSONTEST_FIXTURE( WriterTest, dropNullPlaceholders ) +{ + Json::FastWriter writer; + Json::Value nullValue; + JSONTEST_ASSERT( writer.write(nullValue) == "null\n" ); + + writer.dropNullPlaceholders(); + JSONTEST_ASSERT( writer.write(nullValue) == "\n" ); +} + + int main( int argc, const char *argv[] ) { JsonTest::Runner runner; @@ -1420,5 +1437,6 @@ int main( int argc, const char *argv[] ) JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareArray ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, compareObject ); JSONTEST_REGISTER_FIXTURE( runner, ValueTest, 
compareType ); + JSONTEST_REGISTER_FIXTURE( runner, WriterTest, dropNullPlaceholders ); return runner.runCommandLine( argc, argv ); } From ada1c2043950464ed1f6b8dcd52b49315e00137b Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 27 Jul 2012 09:06:40 +0000 Subject: [PATCH 250/268] Added missing "include/json/assertions.h" header in amalgamate.py. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@250 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/amalgamate.py | 1 + 1 file changed, 1 insertion(+) diff --git a/trunk/jsoncpp/amalgamate.py b/trunk/jsoncpp/amalgamate.py index eab724f..2ced51b 100644 --- a/trunk/jsoncpp/amalgamate.py +++ b/trunk/jsoncpp/amalgamate.py @@ -72,6 +72,7 @@ def amalgamate_source( source_top_dir=None, header.add_file( 'include/json/value.h' ) header.add_file( 'include/json/reader.h' ) header.add_file( 'include/json/writer.h' ) + header.add_file( 'include/json/assertions.h' ) header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' ) target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path ) From ece210da8799a6b5af4bbab635c768dabd8741f1 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 20 Dec 2012 10:08:50 +0000 Subject: [PATCH 251/268] Fixed warning(error?) on #if testing value of _MSC_VER without checking that it was defined. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@251 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/lib_json/json_reader.cpp | 2 +- trunk/jsoncpp/src/lib_json/json_writer.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/src/lib_json/json_reader.cpp b/trunk/jsoncpp/src/lib_json/json_reader.cpp index 1f3873a..3e5df89 100644 --- a/trunk/jsoncpp/src/lib_json/json_reader.cpp +++ b/trunk/jsoncpp/src/lib_json/json_reader.cpp @@ -15,7 +15,7 @@ #include #include -#if _MSC_VER >= 1400 // VC++ 8.0 +#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. #endif diff --git a/trunk/jsoncpp/src/lib_json/json_writer.cpp b/trunk/jsoncpp/src/lib_json/json_writer.cpp index ccf323a..bb76f7a 100644 --- a/trunk/jsoncpp/src/lib_json/json_writer.cpp +++ b/trunk/jsoncpp/src/lib_json/json_writer.cpp @@ -14,7 +14,7 @@ #include #include -#if _MSC_VER >= 1400 // VC++ 8.0 +#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated. 
#endif From 588cf2f5ecd27b86686434ca9d5f99702ebfc46a Mon Sep 17 00:00:00 2001 From: blep Date: Mon, 18 Feb 2013 15:53:47 +0000 Subject: [PATCH 252/268] Fix gcc -Wall warnings (patch from Matt McCormick) git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@252 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/jsontest.h | 5 ++--- trunk/jsoncpp/src/test_lib_json/main.cpp | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 207692b..6dc7535 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -190,7 +190,7 @@ namespace JsonTest { checkEqual( TestResult &result, const T &expected, const U &actual, const char *file, unsigned int line, const char *expr ) { - if ( expected != actual ) + if ( static_cast< U >( expected ) != actual ) { result.addFailure( file, line, expr ); result << "Expected: " << expected << "\n"; @@ -229,8 +229,7 @@ namespace JsonTest { result_->predicateStackTail_ = &_minitest_Context; \ (expr); \ result_->popPredicateContext(); \ - } \ - *result_ + } /// \brief Asserts that two values are equals. #define JSONTEST_ASSERT_EQUAL( expected, actual ) \ diff --git a/trunk/jsoncpp/src/test_lib_json/main.cpp b/trunk/jsoncpp/src/test_lib_json/main.cpp index 3e28085..ce4f1f4 100644 --- a/trunk/jsoncpp/src/test_lib_json/main.cpp +++ b/trunk/jsoncpp/src/test_lib_json/main.cpp @@ -63,8 +63,8 @@ struct ValueTest : JsonTest::TestCase : emptyArray_( Json::arrayValue ) , emptyObject_( Json::objectValue ) , integer_( 123456789 ) - , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) , unsignedInteger_( 34567890u ) + , smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) ) , real_( 1234.56789 ) , float_( 0.00390625f ) , emptyString_( "" ) From d0eb9cb00af5a27f98753989042795739aa1a365 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 12 Apr 2013 13:11:14 +0000 Subject: [PATCH 253/268] Patch #3539678: Copy constructor does not initialize allocated_ for stringValue (contributed by rmongia). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@253 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 5 +++++ trunk/jsoncpp/src/lib_json/json_value.cpp | 3 +++ 2 files changed, 8 insertions(+) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 8316ff6..39ed9ab 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -31,6 +31,11 @@ New in SVN representable using an Int64, or asDouble() combined with minInt64 and maxInt64 to figure out whether it is approximately representable. +* Bug fixes + - Patch #3539678: Copy constructor does not initialize allocated_ for stringValue + (contributed by rmongia). 
+ + New in JsonCpp 0.6.0: --------------------- diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 91f312e..4f2e216 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -470,7 +470,10 @@ Value::Value( const Value &other ) allocated_ = true; } else + { value_.string_ = 0; + allocated_ = false; + } break; #ifndef JSON_VALUE_USE_INTERNAL_MAP case arrayValue: From 94f81d4832bf08082cfc2847a79e80053f0fffb9 Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 12 Apr 2013 13:26:23 +0000 Subject: [PATCH 254/268] Patch #3600941: Missing field copy in Json::Value::iterator causing infinite loop when using experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP) (contributed by Ming-Lin Kao). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@254 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 3 +++ trunk/jsoncpp/src/lib_json/json_valueiterator.inl | 1 + 2 files changed, 4 insertions(+) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 39ed9ab..f09c71e 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -35,6 +35,9 @@ New in SVN - Patch #3539678: Copy constructor does not initialize allocated_ for stringValue (contributed by rmongia). + - Patch #3600941: Missing field copy in Json::Value::iterator causing infinite + loop when using experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP) + (contributed by Ming-Lin Kao). New in JsonCpp 0.6.0: diff --git a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl index 7457ca3..9b85580 100644 --- a/trunk/jsoncpp/src/lib_json/json_valueiterator.inl +++ b/trunk/jsoncpp/src/lib_json/json_valueiterator.inl @@ -149,6 +149,7 @@ ValueIteratorBase::copy( const SelfType &other ) { #ifndef JSON_VALUE_USE_INTERNAL_MAP current_ = other.current_; + isNull_ = other.isNull_; #else if ( isArray_ ) iterator_.array_ = other.iterator_.array_; From e7c24b8543458a7dbf015755225f8b43dee827bb Mon Sep 17 00:00:00 2001 From: blep Date: Fri, 12 Apr 2013 14:10:13 +0000 Subject: [PATCH 255/268] - Patch #3393345: BOOST_FOREACH compatibility. Made Json::iterator more standard compliant, added missing iterator_category and value_type typedefs (contribued by Robert A. Iannucci). - Patch #3474563: added missing JSON_API on some classes causing link issues when building as a dynamic library on Windows (contributed by Francis Bolduc). git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@255 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 11 +++++++++++ trunk/jsoncpp/doc/roadmap.dox | 1 + trunk/jsoncpp/include/json/value.h | 13 ++++++++----- 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index f09c71e..ae4005d 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -31,6 +31,17 @@ New in SVN representable using an Int64, or asDouble() combined with minInt64 and maxInt64 to figure out whether it is approximately representable. +* Value + - Patch #3393345: BOOST_FOREACH compatibility. Made Json::iterator more + standard compliant, added missing iterator_category and value_type + typedefs (contribued by Robert A. Iannucci). + +* Compilation + + - Patch #3474563: added missing JSON_API on some classes causing link issues + when building as a dynamic library on Windows + (contributed by Francis Bolduc). 
+ * Bug fixes - Patch #3539678: Copy constructor does not initialize allocated_ for stringValue (contributed by rmongia). diff --git a/trunk/jsoncpp/doc/roadmap.dox b/trunk/jsoncpp/doc/roadmap.dox index e6fc17a..0c29a90 100644 --- a/trunk/jsoncpp/doc/roadmap.dox +++ b/trunk/jsoncpp/doc/roadmap.dox @@ -2,6 +2,7 @@ \section ms_release Makes JsonCpp ready for release - Build system clean-up: - Fix build on Windows (shared-library build is broken) + - Compile and run tests using shared library on Windows to ensure no JSON_API macro is missing. - Add enable/disable flag for static and shared library build - Enhance help - Platform portability check: (Notes: was ok on last check) diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index b013c9b..6daa8d1 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -507,7 +507,7 @@ namespace Json { /** \brief Experimental and untested: represents an element of the "path" to access a node. */ - class PathArgument + class JSON_API PathArgument { public: friend class Path; @@ -540,7 +540,7 @@ namespace Json { * - ".%" => member name is provided as parameter * - ".[%]" => index is provied as parameter */ - class Path + class JSON_API Path { public: Path( const std::string &path, @@ -916,9 +916,10 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator /** \brief base class for Value iterators. * */ - class ValueIteratorBase + class JSON_API ValueIteratorBase { public: + typedef std::bidirectional_iterator_tag iterator_category; typedef unsigned int size_t; typedef int difference_type; typedef ValueIteratorBase SelfType; @@ -986,10 +987,11 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator /** \brief const iterator for object and array value. * */ - class ValueConstIterator : public ValueIteratorBase + class JSON_API ValueConstIterator : public ValueIteratorBase { friend class Value; public: + typedef const Value value_type; typedef unsigned int size_t; typedef int difference_type; typedef const Value &reference; @@ -1044,10 +1046,11 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator /** \brief Iterator for object and array value. */ - class ValueIterator : public ValueIteratorBase + class JSON_API ValueIterator : public ValueIteratorBase { friend class Value; public: + typedef Value value_type; typedef unsigned int size_t; typedef int difference_type; typedef Value &reference; From f89b8454c713225136f37e92630e3d11d10a5cc2 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 20:21:11 +0000 Subject: [PATCH 256/268] - New CMake based build system. Based in part on contribution from Igor Okulist and Damien Buhl (Patch #14). Added support for running tests and building with DLL on Windows. - added missing JSON_API - Visual Studio DLL: suppressed warning "C4251: : needs to have dll-interface to be used by..." via pragma push/pop in json-cpp headers. - New header json/version.h now contains version number macros (JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH and JSONCPP_VERSION_HEXA). While this header is generated by CMake, it is committed to ease build with alternate build system (CMake only update the file when it changes avoid issues with VCS). 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@256 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/CMakeLists.txt | 71 +++++++++++++++++++ trunk/jsoncpp/NEWS.txt | 19 +++-- trunk/jsoncpp/README.txt | 70 ++++++++++++++---- trunk/jsoncpp/amalgamate.py | 1 + trunk/jsoncpp/include/CMakeLists.txt | 2 + trunk/jsoncpp/include/json/config.h | 13 +++- trunk/jsoncpp/include/json/reader.h | 14 +++- trunk/jsoncpp/include/json/value.h | 12 ++++ trunk/jsoncpp/include/json/version.h | 14 ++++ trunk/jsoncpp/include/json/writer.h | 13 +++- trunk/jsoncpp/src/CMakeLists.txt | 5 ++ .../jsoncpp/src/jsontestrunner/CMakeLists.txt | 23 ++++++ trunk/jsoncpp/src/lib_json/CMakeLists.txt | 43 +++++++++++ trunk/jsoncpp/src/lib_json/version.h.in | 14 ++++ .../jsoncpp/src/test_lib_json/CMakeLists.txt | 21 ++++++ 15 files changed, 314 insertions(+), 21 deletions(-) create mode 100644 trunk/jsoncpp/CMakeLists.txt create mode 100644 trunk/jsoncpp/include/CMakeLists.txt create mode 100644 trunk/jsoncpp/include/json/version.h create mode 100644 trunk/jsoncpp/src/CMakeLists.txt create mode 100644 trunk/jsoncpp/src/jsontestrunner/CMakeLists.txt create mode 100644 trunk/jsoncpp/src/lib_json/CMakeLists.txt create mode 100644 trunk/jsoncpp/src/lib_json/version.h.in create mode 100644 trunk/jsoncpp/src/test_lib_json/CMakeLists.txt diff --git a/trunk/jsoncpp/CMakeLists.txt b/trunk/jsoncpp/CMakeLists.txt new file mode 100644 index 0000000..21c0ebe --- /dev/null +++ b/trunk/jsoncpp/CMakeLists.txt @@ -0,0 +1,71 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.6) +PROJECT(jsoncpp) +ENABLE_TESTING() + +OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON) +OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON) + +# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix +IF(NOT WIN32) + IF(NOT CMAKE_BUILD_TYPE) + SET(CMAKE_BUILD_TYPE Release CACHE STRING + "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage." + FORCE) + ENDIF(NOT CMAKE_BUILD_TYPE) +ENDIF(NOT WIN32) + +# This ensures shared DLL are in the same dir as executable on Windows. +# Put all executables / libraries are in a project global directory. +SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib + CACHE PATH "Single directory for all static libraries.") +SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib + CACHE PATH "Single directory for all dynamic libraries on Unix.") +SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin + CACHE PATH "Single directory for all executable and dynamic libraries on Windows.") +MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY ) + +# Set variable named ${VAR_NAME} to value ${VALUE} +FUNCTION(set_using_dynamic_name VAR_NAME VALUE) + SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE) +ENDFUNCTION(set_using_dynamic_name) + +# Extract major, minor, patch and qualifier from version text +# Parse a version string "X.Y.Z[-qualifier]" and outputs +# version parts in ${OUPUT_PREFIX}_MAJOR, _MINOR, _PATCH, _QUALIFIER. +# If parse succed then ${OUPUT_PREFIX}_FOUND is TRUE. 
+MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX) + SET(VERSION_REGEX "[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9_]+)?") + IF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) + STRING(REGEX MATCHALL "[0-9]+|-([A-Za-z0-9_]+)" VERSION_PARTS ${VERSION_TEXT}) + list(APPEND VERSION_PARTS "") # empty qualifier to handle no qualifier case + LIST(GET VERSION_PARTS 0 ${OUPUT_PREFIX}_MAJOR) + LIST(GET VERSION_PARTS 1 ${OUPUT_PREFIX}_MINOR) + LIST(GET VERSION_PARTS 2 ${OUPUT_PREFIX}_PATCH) + LIST(GET VERSION_PARTS 3 ${OUPUT_PREFIX}_QUALIFIER) + set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE ) + ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) + set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE ) + ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) +ENDMACRO(jsoncpp_parse_version) + +# Read out version from "version" file +FILE(STRINGS "version" JSONCPP_VERSION) + +jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION ) +IF(NOT JSONCPP_VERSION_FOUND) + MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z[-qualifier]") +ENDIF(NOT JSONCPP_VERSION_FOUND) + +MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}${JSONCPP_VERSION_QUALIFIER}") +# File version.h is only regenerated on CMake configure step +CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in" + "${PROJECT_SOURCE_DIR}/include/json/version.h" ) + +# Include our configuration header +INCLUDE_DIRECTORIES( ${CMAKE_SOURCE_DIR}/include ) + +# Build the different applications +ADD_SUBDIRECTORY( src ) + +#install the includes +ADD_SUBDIRECTORY( include ) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index ae4005d..193d166 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -32,21 +32,32 @@ New in SVN maxInt64 to figure out whether it is approximately representable. * Value - - Patch #3393345: BOOST_FOREACH compatibility. Made Json::iterator more + - Patch #10: BOOST_FOREACH compatibility. Made Json::iterator more standard compliant, added missing iterator_category and value_type typedefs (contribued by Robert A. Iannucci). * Compilation - - Patch #3474563: added missing JSON_API on some classes causing link issues + - New CMake based build system. Based in part on contribution from + Igor Okulist and Damien Buhl (Patch #14). + + - New header json/version.h now contains version number macros + (JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH + and JSONCPP_VERSION_HEXA). + + - Patch #11: added missing JSON_API on some classes causing link issues when building as a dynamic library on Windows (contributed by Francis Bolduc). + + - Visual Studio DLL: suppressed warning "C4251: : + needs to have dll-interface to be used by..." via pragma push/pop + in json-cpp headers. * Bug fixes - - Patch #3539678: Copy constructor does not initialize allocated_ for stringValue + - Patch #15: Copy constructor does not initialize allocated_ for stringValue (contributed by rmongia). - - Patch #3600941: Missing field copy in Json::Value::iterator causing infinite + - Patch #16: Missing field copy in Json::Value::iterator causing infinite loop when using experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP) (contributed by Ming-Lin Kao). diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index 88c1178..ca3a486 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -13,9 +13,65 @@ making it a convenient format to store user input files. 
Unserialization parsing is user friendly and provides precise error reports. +* Using json-cpp in your project: + =============================== + +The recommended approach to integrate json-cpp in your project is to +build the the amalgamated source (a single .cpp) with your own build +system. This ensures compilation flags consistency and ABI compatibility. + +See section "Generating amalgamated source and header" to generate them +from the source distribution. + +Directory include/ should be added to your compiler include path. +json-cpp headers should be included as follow: + + #include + +If json-cpp was build as a dynamic library on Windows, then your project +need to define macro "JSON_DLL" to JSON_API should import exported symbols. + +* Building/Testing with new CMake build system: + ============================================= + +CMake is a C++ Makefiles/Solution generator that can be downloaded from: + http://www.cmake.org + +It is usually available on most Linux system as package. On Ubuntu: + sudo apt-get install cmake + +Notes that python is also required to run JSON reader/writer tests. If +missing, the build will skip running those tests. + +When running CMake, a few parameters are required: +- a build directory where the makefiles/solution are generated. It is + also used to store objects, libraries and executables files. +- the generator to use: makefiles or Visual Studio solution? What version + or Visual Studio, 32 or 64 bits solution? + +Generating solution/makefiles using cmake-gui: +- Makes "source code" points the source directory +- Makes "where to build the binary" points to the directory to use for + the build. +- Click on the "Grouped" check box +- Review JsonCpp build option (tick JSONCPP_LIB_BUILD_SHARED to build as + a dynamic library) +- Click configure button at the bottom, then the generate button. +- The generated solution/makefiles can be found in the binary directory. + +Alternatively, from the command-line on Unix in the source directory: + + mkdir -p ../build/debug + cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../build/debug + (cd ../build/debug && make) + +Running "cmake -h" will display the list of available generators (passed as -G option). + +By default CMake hides compilation command-line. This can be modified by specifying: +-DCMAKE_VERBOSE_MAKEFILE=true when generating makefiles. -* Building/Testing: - ================= +* Building/Testing with the legacy build system based on SCons: + ============================================================= JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires python to be installed (http://www.python.org). @@ -47,7 +103,6 @@ to do so. and TARGET may be: check: build library and run unit tests. - * Running the test manually: ========================== @@ -115,15 +170,6 @@ The amalgamated sources are generated by concatenating JsonCpp source in the correct order and defining macro JSON_IS_AMALGAMATION to prevent inclusion of other headers. -* Using json-cpp in your project: - =============================== - -include/ should be added to your compiler include path. 
jsoncpp headers -should be included as follow: - -#include - - * Adding a reader/writer test: ============================ diff --git a/trunk/jsoncpp/amalgamate.py b/trunk/jsoncpp/amalgamate.py index 2ced51b..5222655 100644 --- a/trunk/jsoncpp/amalgamate.py +++ b/trunk/jsoncpp/amalgamate.py @@ -66,6 +66,7 @@ def amalgamate_source( source_top_dir=None, header.add_text( '/// If defined, indicates that the source file is amalgated' ) header.add_text( '/// to prevent private header inclusion.' ) header.add_text( '#define JSON_IS_AMALGAMATION' ) + header.add_file( 'include/json/version.h' ) header.add_file( 'include/json/config.h' ) header.add_file( 'include/json/forwards.h' ) header.add_file( 'include/json/features.h' ) diff --git a/trunk/jsoncpp/include/CMakeLists.txt b/trunk/jsoncpp/include/CMakeLists.txt new file mode 100644 index 0000000..7d832a0 --- /dev/null +++ b/trunk/jsoncpp/include/CMakeLists.txt @@ -0,0 +1,2 @@ +FILE(GLOB INCLUDE_FILES "json/*.h") +INSTALL(FILES ${INCLUDE_FILES} DESTINATION include/json) diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index 72437c4..c9b298d 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -46,10 +46,17 @@ # ifdef JSON_IN_CPPTL # define JSON_API CPPTL_API # elif defined(JSON_DLL_BUILD) -# define JSON_API __declspec(dllexport) +# if defined(_MSC_VER) +# define JSON_API __declspec(dllexport) +# define JSONCPP_DISABLE_DLL_INTERFACE_WARNING +# endif // if defined(_MSC_VER) # elif defined(JSON_DLL) -# define JSON_API __declspec(dllimport) -# else +# if defined(_MSC_VER) +# define JSON_API __declspec(dllimport) +# define JSONCPP_DISABLE_DLL_INTERFACE_WARNING +# endif // if defined(_MSC_VER) +# endif // ifdef JSON_IN_CPPTL +# if !defined(JSON_API) # define JSON_API # endif diff --git a/trunk/jsoncpp/include/json/reader.h b/trunk/jsoncpp/include/json/reader.h index a3023b3..189da57 100644 --- a/trunk/jsoncpp/include/json/reader.h +++ b/trunk/jsoncpp/include/json/reader.h @@ -14,6 +14,13 @@ # include # include +// Disable warning C4251: : needs to have dll-interface to be used by... +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +# pragma warning(push) +# pragma warning(disable:4251) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + + namespace Json { /** \brief Unserialize a JSON document into a Value. @@ -206,8 +213,13 @@ namespace Json { \throw std::exception on parse error. \see Json::operator<<() */ - std::istream& operator>>( std::istream&, Value& ); + JSON_API std::istream& operator>>( std::istream&, Value& ); } // namespace Json +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +# pragma warning(pop) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + + #endif // CPPTL_JSON_READER_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/value.h b/trunk/jsoncpp/include/json/value.h index 6daa8d1..bd7f181 100644 --- a/trunk/jsoncpp/include/json/value.h +++ b/trunk/jsoncpp/include/json/value.h @@ -21,6 +21,13 @@ # include # endif +// Disable warning C4251: : needs to have dll-interface to be used by... +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +# pragma warning(push) +# pragma warning(disable:4251) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + + /** \brief JSON (JavaScript Object Notation). 
*/ namespace Json { @@ -1109,4 +1116,9 @@ class DefaultValueArrayAllocator : public ValueArrayAllocator } // namespace Json +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +# pragma warning(pop) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + + #endif // CPPTL_JSON_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/version.h b/trunk/jsoncpp/include/json/version.h new file mode 100644 index 0000000..9e1efcc --- /dev/null +++ b/trunk/jsoncpp/include/json/version.h @@ -0,0 +1,14 @@ +// DO NOT EDIT. This file is generated by CMake from "version" +// and "version.h.in" files. +// Run CMake configure step to update it. +#ifndef JSON_VERSION_H_INCLUDED +# define JSON_VERSION_H_INCLUDED + +# define JSONCPP_VERSION_STRING "0.6.0-dev" +# define JSONCPP_VERSION_MAJOR 0 +# define JSONCPP_VERSION_MINOR 6 +# define JSONCPP_VERSION_PATCH 0 +# define JSONCPP_VERSION_QUALIFIER -dev +# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) + +#endif // JSON_VERSION_H_INCLUDED diff --git a/trunk/jsoncpp/include/json/writer.h b/trunk/jsoncpp/include/json/writer.h index 46d5ccc..23ebd50 100644 --- a/trunk/jsoncpp/include/json/writer.h +++ b/trunk/jsoncpp/include/json/writer.h @@ -12,6 +12,13 @@ # include # include +// Disable warning C4251: : needs to have dll-interface to be used by... +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +# pragma warning(push) +# pragma warning(disable:4251) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + + namespace Json { class Value; @@ -183,10 +190,14 @@ namespace Json { /// \brief Output using the StyledStreamWriter. /// \see Json::operator>>() - std::ostream& operator<<( std::ostream&, const Value &root ); + JSON_API std::ostream& operator<<( std::ostream&, const Value &root ); } // namespace Json +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +# pragma warning(pop) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + #endif // JSON_WRITER_H_INCLUDED diff --git a/trunk/jsoncpp/src/CMakeLists.txt b/trunk/jsoncpp/src/CMakeLists.txt new file mode 100644 index 0000000..608d3f7 --- /dev/null +++ b/trunk/jsoncpp/src/CMakeLists.txt @@ -0,0 +1,5 @@ +ADD_SUBDIRECTORY(lib_json) +IF(JSONCPP_WITH_TESTS) + ADD_SUBDIRECTORY(jsontestrunner) + ADD_SUBDIRECTORY(test_lib_json) +ENDIF(JSONCPP_WITH_TESTS) diff --git a/trunk/jsoncpp/src/jsontestrunner/CMakeLists.txt b/trunk/jsoncpp/src/jsontestrunner/CMakeLists.txt new file mode 100644 index 0000000..7ab2f00 --- /dev/null +++ b/trunk/jsoncpp/src/jsontestrunner/CMakeLists.txt @@ -0,0 +1,23 @@ +FIND_PACKAGE(PythonInterp 2.6 REQUIRED) + +IF(JSONCPP_LIB_BUILD_SHARED) + ADD_DEFINITIONS( -DJSON_DLL ) +ENDIF(JSONCPP_LIB_BUILD_SHARED) + +ADD_EXECUTABLE(jsontestrunner_exe + main.cpp + ) +TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib) +SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe) + +IF(PYTHONINTERP_FOUND) + # Run end to end parser/writer tests + GET_PROPERTY(JSONTESTRUNNER_EXE_PATH TARGET jsontestrunner_exe PROPERTY LOCATION) + SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test) + SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py) + ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ALL + "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" "${JSONTESTRUNNER_EXE_PATH}" "${TEST_DIR}/data" + DEPENDS jsontestrunner_exe jsoncpp_test + ) + ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests) +ENDIF(PYTHONINTERP_FOUND) diff --git a/trunk/jsoncpp/src/lib_json/CMakeLists.txt 
b/trunk/jsoncpp/src/lib_json/CMakeLists.txt new file mode 100644 index 0000000..39aa0b3 --- /dev/null +++ b/trunk/jsoncpp/src/lib_json/CMakeLists.txt @@ -0,0 +1,43 @@ +OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF) +IF(JSONCPP_LIB_BUILD_SHARED) + SET(JSONCPP_LIB_TYPE SHARED) + ADD_DEFINITIONS( -DJSON_DLL_BUILD ) +ELSE(JSONCPP_LIB_BUILD_SHARED) + SET(JSONCPP_LIB_TYPE STATIC) +ENDIF(JSONCPP_LIB_BUILD_SHARED) + + +SET( JSONCPP_INCLUDE_DIR ../../include ) + +SET( PUBLIC_HEADERS + ${JSONCPP_INCLUDE_DIR}/json/config.h + ${JSONCPP_INCLUDE_DIR}/json/forwards.h + ${JSONCPP_INCLUDE_DIR}/json/features.h + ${JSONCPP_INCLUDE_DIR}/json/value.h + ${JSONCPP_INCLUDE_DIR}/json/reader.h + ${JSONCPP_INCLUDE_DIR}/json/writer.h + ${JSONCPP_INCLUDE_DIR}/json/assertions.h + ${JSONCPP_INCLUDE_DIR}/json/version.h + ) + +SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} ) + +ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE} + ${PUBLIC_HEADERS} + json_tool.h + json_reader.cpp + json_batchallocator.h + json_valueiterator.inl + json_value.cpp + json_writer.cpp + version.h.in + ) +SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp ) +SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSON_CPP_VERSION} SOVERSION ${JSON_CPP_VERSION} ) + +# Install instructions for this target +INSTALL( TARGETS jsoncpp_lib + RUNTIME DESTINATION bin + LIBRARY DESTINATION lib + ARCHIVE DESTINATION lib +) diff --git a/trunk/jsoncpp/src/lib_json/version.h.in b/trunk/jsoncpp/src/lib_json/version.h.in new file mode 100644 index 0000000..217bcff --- /dev/null +++ b/trunk/jsoncpp/src/lib_json/version.h.in @@ -0,0 +1,14 @@ +// DO NOT EDIT. This file is generated by CMake from "version" +// and "version.h.in" files. +// Run CMake configure step to update it. +#ifndef JSON_VERSION_H_INCLUDED +# define JSON_VERSION_H_INCLUDED + +# define JSONCPP_VERSION_STRING "@JSONCPP_VERSION@" +# define JSONCPP_VERSION_MAJOR @JSONCPP_VERSION_MAJOR@ +# define JSONCPP_VERSION_MINOR @JSONCPP_VERSION_MINOR@ +# define JSONCPP_VERSION_PATCH @JSONCPP_VERSION_PATCH@ +# define JSONCPP_VERSION_QUALIFIER @JSONCPP_VERSION_QUALIFIER@ +# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) + +#endif // JSON_VERSION_H_INCLUDED diff --git a/trunk/jsoncpp/src/test_lib_json/CMakeLists.txt b/trunk/jsoncpp/src/test_lib_json/CMakeLists.txt new file mode 100644 index 0000000..563f713 --- /dev/null +++ b/trunk/jsoncpp/src/test_lib_json/CMakeLists.txt @@ -0,0 +1,21 @@ + +IF(JSONCPP_LIB_BUILD_SHARED) + ADD_DEFINITIONS( -DJSON_DLL ) +ENDIF(JSONCPP_LIB_BUILD_SHARED) + +ADD_EXECUTABLE( jsoncpp_test + jsontest.cpp + main.cpp + ) + +TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib) + +# Run unit tests in post-build +# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?) +IF(JSONCPP_WITH_POST_BUILD_UNITTEST) + ADD_CUSTOM_COMMAND( TARGET jsoncpp_test + POST_BUILD + COMMAND jsoncpp_test) +ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST) + +SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test) From fe8ad6b7c50a122af65ff51b4d8e078d35f9bf9b Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 20:37:54 +0000 Subject: [PATCH 257/268] Fixed CMake / Unix build instructions. 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@257 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/README.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/trunk/jsoncpp/README.txt b/trunk/jsoncpp/README.txt index ca3a486..6128e3e 100644 --- a/trunk/jsoncpp/README.txt +++ b/trunk/jsoncpp/README.txt @@ -62,8 +62,9 @@ Generating solution/makefiles using cmake-gui: Alternatively, from the command-line on Unix in the source directory: mkdir -p ../build/debug - cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../build/debug - (cd ../build/debug && make) + cd ../build/debug + cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../../jsoncpp-src + make Running "cmake -h" will display the list of available generators (passed as -G option). From a30826f3326f0bb678b0905e084360890691a319 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 20:46:56 +0000 Subject: [PATCH 258/268] Added basic Travis CI integration contributed by Igor Okulist. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@258 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/.travis.yml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 trunk/jsoncpp/.travis.yml diff --git a/trunk/jsoncpp/.travis.yml b/trunk/jsoncpp/.travis.yml new file mode 100644 index 0000000..4a32162 --- /dev/null +++ b/trunk/jsoncpp/.travis.yml @@ -0,0 +1,4 @@ +before_install: sudo apt-get install cmake +language: cpp +compiler: gcc +script: cmake . && make From 3dcc7b12c6700edced7eb9e0ac9c0666ccad882d Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 21:04:42 +0000 Subject: [PATCH 259/268] Added clang compiler for continuous integration. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@259 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/.travis.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/.travis.yml b/trunk/jsoncpp/.travis.yml index 4a32162..0672790 100644 --- a/trunk/jsoncpp/.travis.yml +++ b/trunk/jsoncpp/.travis.yml @@ -1,4 +1,6 @@ before_install: sudo apt-get install cmake language: cpp -compiler: gcc +compiler: + - gcc + - clang script: cmake . && make From 1b1b1c959be032a95ebfb588617f151df3bee117 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 21:23:52 +0000 Subject: [PATCH 260/268] Added continuous integration failure e-mail notification. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@260 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/.travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/trunk/jsoncpp/.travis.yml b/trunk/jsoncpp/.travis.yml index 0672790..a652b80 100644 --- a/trunk/jsoncpp/.travis.yml +++ b/trunk/jsoncpp/.travis.yml @@ -4,3 +4,9 @@ compiler: - gcc - clang script: cmake . && make +notifications: + recipients: + - baptiste.lepilleur@gmail.com + email: + on_success: change + on_failure: always From 3e6c709d058ced5b36a021a5e07932df2b4a772e Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 22:04:57 +0000 Subject: [PATCH 261/268] Added continuous integration matrix for shared/static library (specified through environment variables). 
git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@261 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/.travis.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/trunk/jsoncpp/.travis.yml b/trunk/jsoncpp/.travis.yml index a652b80..13188fd 100644 --- a/trunk/jsoncpp/.travis.yml +++ b/trunk/jsoncpp/.travis.yml @@ -1,9 +1,17 @@ +# Build matrix / environment variable are explained on: +# http://about.travis-ci.org/docs/user/build-configuration/ before_install: sudo apt-get install cmake language: cpp compiler: - gcc - clang -script: cmake . && make +script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY . && make +env: + global: + - JSONCPP_CONTINUOUS_INTERATION=1 + matrix: + - SHARED_LIBRARY=ON + - SHARED_LIBRARY=OFF notifications: recipients: - baptiste.lepilleur@gmail.com From 0b1c502feded5bf7ca28fa626fc4878c56c8b837 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 22:15:15 +0000 Subject: [PATCH 262/268] Added continuous integration matrix for debug/release build. Made static debug build verbose. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@262 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/.travis.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/trunk/jsoncpp/.travis.yml b/trunk/jsoncpp/.travis.yml index 13188fd..df92893 100644 --- a/trunk/jsoncpp/.travis.yml +++ b/trunk/jsoncpp/.travis.yml @@ -1,17 +1,20 @@ # Build matrix / environment variable are explained on: # http://about.travis-ci.org/docs/user/build-configuration/ +# This file can be validated on: +# http://lint.travis-ci.org/ before_install: sudo apt-get install cmake language: cpp compiler: - gcc - clang -script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY . && make +script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE. && make env: global: - JSONCPP_CONTINUOUS_INTERATION=1 matrix: - - SHARED_LIBRARY=ON - - SHARED_LIBRARY=OFF + - SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false + - SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false + - SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE VERBOSE_MAKE=true notifications: recipients: - baptiste.lepilleur@gmail.com From 267fcef8f0b433c377fe9391efff89643ee55dd9 Mon Sep 17 00:00:00 2001 From: blep Date: Wed, 8 May 2013 22:23:07 +0000 Subject: [PATCH 263/268] Fixed continuous integration matrix for debug/release build. Made static debug build verbose. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@263 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/.travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trunk/jsoncpp/.travis.yml b/trunk/jsoncpp/.travis.yml index df92893..0fa2305 100644 --- a/trunk/jsoncpp/.travis.yml +++ b/trunk/jsoncpp/.travis.yml @@ -7,7 +7,7 @@ language: cpp compiler: - gcc - clang -script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE. && make +script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . 
&& make env: global: - JSONCPP_CONTINUOUS_INTERATION=1 From 90612494a1511242a05ea9e45fb83877cc7f3e9c Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 9 May 2013 15:20:32 +0000 Subject: [PATCH 264/268] git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@264 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/NEWS.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/trunk/jsoncpp/NEWS.txt b/trunk/jsoncpp/NEWS.txt index 193d166..1be7b8e 100644 --- a/trunk/jsoncpp/NEWS.txt +++ b/trunk/jsoncpp/NEWS.txt @@ -52,6 +52,8 @@ New in SVN - Visual Studio DLL: suppressed warning "C4251: : needs to have dll-interface to be used by..." via pragma push/pop in json-cpp headers. + + - Added Travis CI intregration: https://travis-ci.org/blep/jsoncpp-mirror * Bug fixes - Patch #15: Copy constructor does not initialize allocated_ for stringValue From 4912951ef4ea88871bee43befa807045e33d4618 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 9 May 2013 15:21:06 +0000 Subject: [PATCH 265/268] Added missing source file to CMakeLists.txt. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@265 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/src/test_lib_json/CMakeLists.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/trunk/jsoncpp/src/test_lib_json/CMakeLists.txt b/trunk/jsoncpp/src/test_lib_json/CMakeLists.txt index 563f713..7fb5602 100644 --- a/trunk/jsoncpp/src/test_lib_json/CMakeLists.txt +++ b/trunk/jsoncpp/src/test_lib_json/CMakeLists.txt @@ -5,6 +5,7 @@ ENDIF(JSONCPP_LIB_BUILD_SHARED) ADD_EXECUTABLE( jsoncpp_test jsontest.cpp + jsontest.h main.cpp ) From a30e22fabfe4e34e324b58880eb589b1550838bf Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 9 May 2013 15:22:14 +0000 Subject: [PATCH 266/268] Added simple batch build script for CMake. git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@266 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/devtools/agent_vmw7.json | 33 +++ trunk/jsoncpp/devtools/batchbuild.py | 280 +++++++++++++++++++++++++ 2 files changed, 313 insertions(+) create mode 100644 trunk/jsoncpp/devtools/agent_vmw7.json create mode 100644 trunk/jsoncpp/devtools/batchbuild.py diff --git a/trunk/jsoncpp/devtools/agent_vmw7.json b/trunk/jsoncpp/devtools/agent_vmw7.json new file mode 100644 index 0000000..38b50d9 --- /dev/null +++ b/trunk/jsoncpp/devtools/agent_vmw7.json @@ -0,0 +1,33 @@ +{ + "cmake_variants" : [ + {"name": "generator", + "generators": [ + {"generator": [ + "Visual Studio 7 .NET 2003", + "Visual Studio 9 2008", + "Visual Studio 9 2008 Win64", + "Visual Studio 10", + "Visual Studio 10 Win64", + "Visual Studio 11", + "Visual Studio 11 Win64" + ] + }, + {"generator": ["MinGW Makefiles"], + "env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}] + } + ] + }, + {"name": "shared_dll", + "variables": [ + ["JSONCPP_LIB_BUILD_SHARED=true"], + ["JSONCPP_LIB_BUILD_SHARED=false"] + ] + }, + {"name": "build_type", + "build_types": [ + "debug", + "release" + ] + } + ] +} diff --git a/trunk/jsoncpp/devtools/batchbuild.py b/trunk/jsoncpp/devtools/batchbuild.py new file mode 100644 index 0000000..5da74db --- /dev/null +++ b/trunk/jsoncpp/devtools/batchbuild.py @@ -0,0 +1,280 @@ +import collections +import itertools +import json +import os +import os.path +import re +import shutil +import string +import subprocess +import sys +import cgi + +class BuildDesc: + def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None): + self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ] + self.variables = variables or [] + self.build_type = build_type + self.generator = 
generator + + def merged_with( self, build_desc ): + """Returns a new BuildDesc by merging field content. + Prefer build_desc fields to self fields for single valued field. + """ + return BuildDesc( self.prepend_envs + build_desc.prepend_envs, + self.variables + build_desc.variables, + build_desc.build_type or self.build_type, + build_desc.generator or self.generator ) + + def env( self ): + environ = os.environ.copy() + for values_by_name in self.prepend_envs: + for var, value in values_by_name.items(): + var = var.upper() + if type(value) is unicode: + value = value.encode( sys.getdefaultencoding() ) + if var in environ: + environ[var] = value + os.pathsep + environ[var] + else: + environ[var] = value + return environ + + def cmake_args( self ): + args = ["-D%s" % var for var in self.variables] + # skip build type for Visual Studio solution as it cause warning + if self.build_type and 'Visual' not in self.generator: + args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type ) + if self.generator: + args.extend( ['-G', self.generator] ) + return args + + def __repr__( self ): + return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type) + +class BuildData: + def __init__( self, desc, work_dir, source_dir ): + self.desc = desc + self.work_dir = work_dir + self.source_dir = source_dir + self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' ) + self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' ) + self.cmake_succeeded = False + self.build_succeeded = False + + def execute_build(self): + print 'Build %s' % self.desc + self._make_new_work_dir( ) + self.cmake_succeeded = self._generate_makefiles( ) + if self.cmake_succeeded: + self.build_succeeded = self._build_using_makefiles( ) + return self.build_succeeded + + def _generate_makefiles(self): + print ' Generating makefiles: ', + cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )] + succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path ) + print 'done' if succeeded else 'FAILED' + return succeeded + + def _build_using_makefiles(self): + print ' Building:', + cmd = ['cmake', '--build', self.work_dir] + if self.desc.build_type: + cmd += ['--config', self.desc.build_type] + succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path ) + print 'done' if succeeded else 'FAILED' + return succeeded + + def _execute_build_subprocess(self, cmd, env, log_path): + process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir, + env=env ) + stdout, _ = process.communicate( ) + succeeded = (process.returncode == 0) + with open( log_path, 'wb' ) as flog: + log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode + flog.write( fix_eol( log ) ) + return succeeded + + def _make_new_work_dir(self): + if os.path.isdir( self.work_dir ): + print ' Removing work directory', self.work_dir + shutil.rmtree( self.work_dir, ignore_errors=True ) + if not os.path.isdir( self.work_dir ): + os.makedirs( self.work_dir ) + +def fix_eol( stdout ): + """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n). 
+ """ + return re.sub( '\r*\n', os.linesep, stdout ) + +def load_build_variants_from_config( config_path ): + with open( config_path, 'rb' ) as fconfig: + data = json.load( fconfig ) + variants = data[ 'cmake_variants' ] + build_descs_by_axis = collections.defaultdict( list ) + for axis in variants: + axis_name = axis["name"] + build_descs = [] + if "generators" in axis: + for generator_data in axis["generators"]: + for generator in generator_data["generator"]: + build_desc = BuildDesc( generator=generator, + prepend_envs=generator_data.get("env_prepend") ) + build_descs.append( build_desc ) + elif "variables" in axis: + for variables in axis["variables"]: + build_desc = BuildDesc( variables=variables ) + build_descs.append( build_desc ) + elif "build_types" in axis: + for build_type in axis["build_types"]: + build_desc = BuildDesc( build_type=build_type ) + build_descs.append( build_desc ) + build_descs_by_axis[axis_name].extend( build_descs ) + return build_descs_by_axis + +def generate_build_variants( build_descs_by_axis ): + """Returns a list of BuildDesc generated for the partial BuildDesc for each axis.""" + axis_names = build_descs_by_axis.keys() + build_descs = [] + for axis_name, axis_build_descs in build_descs_by_axis.items(): + if len(build_descs): + # for each existing build_desc and each axis build desc, create a new build_desc + new_build_descs = [] + for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs): + new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) ) + build_descs = new_build_descs + else: + build_descs = axis_build_descs + return build_descs + +HTML_TEMPLATE = string.Template(''' + + $title + + + + + + + + $th_vars + + + + $th_build_types + + + +$tr_builds + +
Variables
Build type
+''') + +def generate_html_report( html_report_path, builds ): + report_dir = os.path.dirname( html_report_path ) + # Vertical axis: generator + # Horizontal: variables, then build_type + builds_by_generator = collections.defaultdict( list ) + variables = set() + build_types_by_variable = collections.defaultdict( set ) + build_by_pos_key = {} # { (generator, var_key, build_type): build } + for build in builds: + builds_by_generator[build.desc.generator].append( build ) + var_key = tuple(sorted(build.desc.variables)) + variables.add( var_key ) + build_types_by_variable[var_key].add( build.desc.build_type ) + pos_key = (build.desc.generator, var_key, build.desc.build_type) + build_by_pos_key[pos_key] = build + variables = sorted( variables ) + th_vars = [] + th_build_types = [] + for variable in variables: + build_types = sorted( build_types_by_variable[variable] ) + nb_build_type = len(build_types_by_variable[variable]) + th_vars.append( '%s' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) ) + for build_type in build_types: + th_build_types.append( '%s' % cgi.escape(build_type) ) + tr_builds = [] + for generator in sorted( builds_by_generator ): + tds = [ '%s\n' % cgi.escape( generator ) ] + for variable in variables: + build_types = sorted( build_types_by_variable[variable] ) + for build_type in build_types: + pos_key = (generator, variable, build_type) + build = build_by_pos_key.get(pos_key) + if build: + cmake_status = 'ok' if build.cmake_succeeded else 'FAILED' + build_status = 'ok' if build.build_succeeded else 'FAILED' + cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir ) + build_log_url = os.path.relpath( build.build_log_path, report_dir ) + td = 'CMake: %s' % ( + build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status) + if build.cmake_succeeded: + td += '
Build: %s' % ( + build_log_url, build_status.lower(), build_status) + td += '' + else: + td = '' + tds.append( td ) + tr_builds.append( '%s' % '\n'.join( tds ) ) + html = HTML_TEMPLATE.substitute( + title='Batch build report', + th_vars=' '.join(th_vars), + th_build_types=' '.join( th_build_types), + tr_builds='\n'.join( tr_builds ) ) + with open( html_report_path, 'wt' ) as fhtml: + fhtml.write( html ) + print 'HTML report generated in:', html_report_path + +def main(): + usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...] +Build a given CMake based project located in SOURCE_DIR with multiple generators/options.dry_run +as described in CONFIG_JSON_PATH building in WORK_DIR. + +Example of call: +python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json +""" + from optparse import OptionParser + parser = OptionParser(usage=usage) + parser.allow_interspersed_args = True +# parser.add_option('-v', '--verbose', dest="verbose", action='store_true', +# help="""Be verbose.""") + parser.enable_interspersed_args() + options, args = parser.parse_args() + if len(args) < 3: + parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." ) + work_dir = args[0] + source_dir = args[1].rstrip('/\\') + config_paths = args[2:] + for config_path in config_paths: + if not os.path.isfile( config_path ): + parser.error( "Can not read: %r" % config_path ) + + # generate build variants + build_descs = [] + for config_path in config_paths: + build_descs_by_axis = load_build_variants_from_config( config_path ) + build_descs.extend( generate_build_variants( build_descs_by_axis ) ) + print 'Build variants (%d):' % len(build_descs) + # assign build directory for each variant + if not os.path.isdir( work_dir ): + os.makedirs( work_dir ) + builds = [] + with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap: + for index, build_desc in enumerate( build_descs ): + build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) ) + builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) ) + fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) ) + for build in builds: + build.execute_build() + html_report_path = os.path.join( work_dir, 'batchbuild-report.html' ) + generate_html_report( html_report_path, builds ) + print 'Done' + + +if __name__ == '__main__': + main() + From 83ae8bf464b00ed1fec64ae2006d2b01a0277a8f Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 9 May 2013 16:24:13 +0000 Subject: [PATCH 267/268] - disabled warning 4786 for VS6 caused by STL (identifier was truncated to '255' characters in the debug information) - added batchbuild config for XP VM git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@267 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/devtools/agent_vmxp.json | 26 ++++++++++++++++++++++++++ trunk/jsoncpp/include/json/config.h | 3 +++ 2 files changed, 29 insertions(+) create mode 100644 trunk/jsoncpp/devtools/agent_vmxp.json diff --git a/trunk/jsoncpp/devtools/agent_vmxp.json b/trunk/jsoncpp/devtools/agent_vmxp.json new file mode 100644 index 0000000..61f5a4c --- /dev/null +++ b/trunk/jsoncpp/devtools/agent_vmxp.json @@ -0,0 +1,26 @@ +{ + "cmake_variants" : [ + {"name": "generator", + "generators": [ + {"generator": [ + "Visual Studio 6", + "Visual Studio 7", + "Visual Studio 8 2005" + ] + } + ] + }, + {"name": "shared_dll", + "variables": [ + ["JSONCPP_LIB_BUILD_SHARED=true"], + ["JSONCPP_LIB_BUILD_SHARED=false"] + ] + }, + {"name": "build_type", + "build_types": [ + "debug", + 
"release" + ] + } + ] +} diff --git a/trunk/jsoncpp/include/json/config.h b/trunk/jsoncpp/include/json/config.h index c9b298d..810a56b 100644 --- a/trunk/jsoncpp/include/json/config.h +++ b/trunk/jsoncpp/include/json/config.h @@ -68,6 +68,9 @@ // Microsoft Visual Studio 6 only support conversion from __int64 to double // (no conversion from unsigned __int64). #define JSON_USE_INT64_DOUBLE_CONVERSION 1 +// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255' characters in the debug information) +// All projects I've ever seen with VS6 were using this globally (not bothering with pragma push/pop). +#pragma warning(disable : 4786) #endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6 #if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008 From b2c30f96f99b60420567006335e88c6d50c9bfe9 Mon Sep 17 00:00:00 2001 From: blep Date: Thu, 9 May 2013 18:42:33 +0000 Subject: [PATCH 268/268] - CMake: added option to turn fail compilation if warning occurs, and warning level 4 with MSVC. - Fixed some warnings git-svn-id: svn://svn.code.sf.net/p/jsoncpp/code@268 1f120ed1-78a5-a849-adca-83f0a9e25bb6 --- trunk/jsoncpp/CMakeLists.txt | 19 +++++++++++++++++++ trunk/jsoncpp/src/lib_json/json_value.cpp | 6 +++--- trunk/jsoncpp/src/test_lib_json/jsontest.cpp | 10 ++++++---- trunk/jsoncpp/src/test_lib_json/jsontest.h | 2 +- 4 files changed, 29 insertions(+), 8 deletions(-) diff --git a/trunk/jsoncpp/CMakeLists.txt b/trunk/jsoncpp/CMakeLists.txt index 21c0ebe..2f88080 100644 --- a/trunk/jsoncpp/CMakeLists.txt +++ b/trunk/jsoncpp/CMakeLists.txt @@ -4,6 +4,7 @@ ENABLE_TESTING() OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON) OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON) +OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF) # Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix IF(NOT WIN32) @@ -61,9 +62,27 @@ MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINO CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in" "${PROJECT_SOURCE_DIR}/include/json/version.h" ) +macro(UseCompilationWarningAsError) + if ( MSVC ) + # Only enabled in debug because some old versions of VS STL generate + # warnings when compiled in release configuration. + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ") + endif( MSVC ) +endmacro() + # Include our configuration header INCLUDE_DIRECTORIES( ${CMAKE_SOURCE_DIR}/include ) +if ( MSVC ) + # Only enabled in debug because some old versions of VS STL generate + # unreachable code warning when compiled in release configuration. 
+ set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ") +endif( MSVC ) + +IF(JSONCPP_WITH_WARNING_AS_ERROR) + UseCompilationWarningAsError() +ENDIF(JSONCPP_WITH_WARNING_AS_ERROR) + # Build the different applications ADD_SUBDIRECTORY( src ) diff --git a/trunk/jsoncpp/src/lib_json/json_value.cpp b/trunk/jsoncpp/src/lib_json/json_value.cpp index 4f2e216..6fdb2d7 100644 --- a/trunk/jsoncpp/src/lib_json/json_value.cpp +++ b/trunk/jsoncpp/src/lib_json/json_value.cpp @@ -1805,7 +1805,7 @@ Path::makePath( const std::string &path, void -Path::addPathInArg( const std::string &path, +Path::addPathInArg( const std::string &/*path*/, const InArgs &in, InArgs::const_iterator &itInArg, PathArgument::Kind kind ) @@ -1826,8 +1826,8 @@ Path::addPathInArg( const std::string &path, void -Path::invalidPath( const std::string &path, - int location ) +Path::invalidPath( const std::string &/*path*/, + int /*location*/ ) { // Error: invalid path. } diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp index 327d344..e27a1eb 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.cpp +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.cpp @@ -480,10 +480,10 @@ Runner::runCommandLine( int argc, const char *argv[] ) const } -#if defined(_MSC_VER) +#if defined(_MSC_VER) && defined(_DEBUG) // Hook MSVCRT assertions to prevent dialog from appearing static int -msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) +msvcrtSilentReportHook( int reportType, char *message, int * /*returnValue*/ ) { // The default CRT handling of error and assertion is to display // an error dialog to the user. @@ -517,9 +517,11 @@ msvcrtSilentReportHook( int reportType, char *message, int *returnValue ) void Runner::preventDialogOnCrash() { -#if defined(_MSC_VER) +#if defined(_MSC_VER) && defined(_DEBUG) // Install a hook to prevent MSVCRT error and assertion from - // popping a dialog. + // popping a dialog + // This function a NO-OP in release configuration + // (which cause warning since msvcrtSilentReportHook is not referenced) _CrtSetReportHook( &msvcrtSilentReportHook ); #endif // if defined(_MSC_VER) diff --git a/trunk/jsoncpp/src/test_lib_json/jsontest.h b/trunk/jsoncpp/src/test_lib_json/jsontest.h index 6dc7535..8d3f46a 100644 --- a/trunk/jsoncpp/src/test_lib_json/jsontest.h +++ b/trunk/jsoncpp/src/test_lib_json/jsontest.h @@ -193,7 +193,7 @@ namespace JsonTest { if ( static_cast< U >( expected ) != actual ) { result.addFailure( file, line, expr ); - result << "Expected: " << expected << "\n"; + result << "Expected: " << static_cast< U >( expected ) << "\n"; result << "Actual : " << actual; } return result;
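A minimal usage sketch for the version macros introduced by include/json/version.h in patch 256 above:
JSONCPP_VERSION_HEXA packs major, minor and patch into a single integer that can be compared at
preprocessing time. The 0.6.0 threshold below is only an illustration, and the snippet assumes the
jsoncpp include/ directory is on the compiler include path.

    #include <cstdio>
    #include <json/version.h>

    // Fail the build if the headers are older than an assumed minimum version.
    // (0 << 24) | (6 << 16) | (0 << 8) mirrors how version.h builds JSONCPP_VERSION_HEXA.
    #if JSONCPP_VERSION_HEXA < ((0 << 24) | (6 << 16) | (0 << 8))
    #error "JsonCpp 0.6.0 or newer required"
    #endif

    int main()
    {
        // JSONCPP_VERSION_STRING is generated by CMake from the "version" file
        // at configure time (see version.h.in above).
        std::printf( "Built against JsonCpp %s (0x%08x)\n",
                     JSONCPP_VERSION_STRING,
                     static_cast<unsigned int>( JSONCPP_VERSION_HEXA ) );
        return 0;
    }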